diff --git a/WHAT_WE_DID.md b/WHAT_WE_DID.md
index 78cf5ef83..090bab468 100644
--- a/WHAT_WE_DID.md
+++ b/WHAT_WE_DID.md
@@ -16,3 +16,4 @@ CAS, edit graph, context editing (6 ops), side threads, objective tracker, class
 - **#24:** Zod v4 migration (zodToJsonSchema → z.toJSONSchema), 25 Frankencode unit tests, tracking docs cleanup
 - **#25:** Upstream catalogue (162 commits + ~195 PRs), security audit (2 CVEs, 5 issues), 6-phase roadmap
 - **#26:** Phase 1 security fixes: S1 symlink bypass, S2 exec→spawn, S4 server auth, S5 sensitive deny-list, S3 warning (13 tests)
+- **#27:** Phase 2 upstream fixes: prompt parts (#17815), thinkingConfig guard (#18283), chunk timeout (#18264), error messages (#18165), event queue (#18259)
diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx
index c85426cc2..249a48d35 100644
--- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx
+++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx
@@ -13,6 +13,7 @@ import { MessageID, PartID } from "@/session/schema"
 import { createStore, produce } from "solid-js/store"
 import { useKeybind } from "@tui/context/keybind"
 import { usePromptHistory, type PromptInfo } from "./history"
+import { assign } from "./part"
 import { usePromptStash } from "./stash"
 import { DialogStash } from "../dialog-stash"
 import { type AutocompleteRef, Autocomplete } from "./autocomplete"
@@ -643,10 +644,7 @@ export function Prompt(props: PromptProps) {
             type: "text",
             text: inputText,
           },
-          ...nonTextParts.map((x) => ({
-            id: PartID.ascending(),
-            ...x,
-          })),
+          ...nonTextParts.map(assign),
         ],
       })
       .catch(() => {})
diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/part.ts b/packages/opencode/src/cli/cmd/tui/component/prompt/part.ts
new file mode 100644
index 000000000..8cdcef606
--- /dev/null
+++ b/packages/opencode/src/cli/cmd/tui/component/prompt/part.ts
@@ -0,0 +1,16 @@
+import { PartID } from "@/session/schema"
+import type { PromptInfo } from "./history"
+
+type Item = PromptInfo["parts"][number]
+
+export function strip(part: Item & { id: string; messageID: string; sessionID: string }): Item {
+  const { id: _id, messageID: _messageID, sessionID: _sessionID, ...rest } = part
+  return rest
+}
+
+export function assign(part: Item): Item & { id: PartID } {
+  return {
+    ...part,
+    id: PartID.ascending(),
+  }
+}
diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx
index 62154cce5..742d51be2 100644
--- a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx
+++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx
@@ -7,6 +7,7 @@ import { useSDK } from "@tui/context/sdk"
 import { useRoute } from "@tui/context/route"
 import { useDialog } from "../../ui/dialog"
 import type { PromptInfo } from "@tui/component/prompt/history"
+import { strip } from "@tui/component/prompt/part"
 
 export function DialogForkFromTimeline(props: { sessionID: string; onMove: (messageID: string) => void }) {
   const sync = useSync()
@@ -42,7 +43,7 @@ export function DialogForkFromTimeline(props: { sessionID: string; onMove: (mess
       if (part.type === "text") {
         if (!part.synthetic) agg.input += part.text
       }
-      if (part.type === "file") agg.parts.push(part)
+      if (part.type === "file") agg.parts.push(strip(part))
       return agg
     },
     { input: "", parts: [] as PromptInfo["parts"] },
diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx
index ff17b5567..a51a6cfe5 100644
--- a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx
+++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx
@@ -5,6 +5,7 @@ import { useSDK } from "@tui/context/sdk"
 import { useRoute } from "@tui/context/route"
 import { Clipboard } from "@tui/util/clipboard"
 import type { PromptInfo } from "@tui/component/prompt/history"
+import { strip } from "@tui/component/prompt/part"
 
 export function DialogMessage(props: {
   messageID: string
@@ -40,7 +41,7 @@ export function DialogMessage(props: {
       if (part.type === "text") {
         if (!part.synthetic) agg.input += part.text
       }
-      if (part.type === "file") agg.parts.push(part)
+      if (part.type === "file") agg.parts.push(strip(part))
      return agg
     },
     { input: "", parts: [] as PromptInfo["parts"] },
diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts
index a892b01e1..2d8baf488 100644
--- a/packages/opencode/src/provider/provider.ts
+++ b/packages/opencode/src/provider/provider.ts
@@ -50,7 +50,8 @@ import { Installation } from "../installation"
 import { ModelID, ProviderID } from "./schema"
 import { JsonValue } from "@/util/json"
 
-const DEFAULT_CHUNK_TIMEOUT = 300_000
+// Chunk timeout disabled by default — prevents false timeouts on slow providers
+// (upstream #18264 by James Long). Enable via provider config chunkTimeout option.
 
 // Provider SDK layer: each AI SDK provider (OpenAI, Anthropic, Google, etc.) returns a unique type
 // with different methods (.responses, .chat, .languageModel). The BUNDLED_PROVIDERS dispatch table,
@@ -1187,7 +1188,7 @@
     if (existing) return existing
 
     const customFetch = options["fetch"] as unknown as typeof globalThis.fetch | undefined
-    const chunkTimeout = (options["chunkTimeout"] as number) || DEFAULT_CHUNK_TIMEOUT
+    const chunkTimeout = options["chunkTimeout"] as number | undefined
     delete options["chunkTimeout"]
 
     // @ts-expect-error fetch function stored in JSON options object
@@ -1230,7 +1231,7 @@
         timeout: false,
       })
 
-      if (!chunkAbortCtl) return res
+      if (!chunkAbortCtl || !chunkTimeout) return res
 
       return wrapSSE(res, chunkTimeout, chunkAbortCtl)
     }
diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts
index 6f19f797f..054ffc55e 100644
--- a/packages/opencode/src/provider/transform.ts
+++ b/packages/opencode/src/provider/transform.ts
@@ -759,11 +759,14 @@
     }
 
     if (input.model.api.npm === "@ai-sdk/google" || input.model.api.npm === "@ai-sdk/google-vertex") {
-      result["thinkingConfig"] = {
-        includeThoughts: true,
-      }
-      if (input.model.api.id.includes("gemini-3")) {
-        result["thinkingConfig"]["thinkingLevel"] = "high"
+      // Only set thinkingConfig for models with reasoning capability (upstream #18283 by Protocol Zero)
+      if (input.model.capabilities.reasoning) {
+        result["thinkingConfig"] = {
+          includeThoughts: true,
+        }
+        if (input.model.api.id.includes("gemini-3")) {
+          result["thinkingConfig"]["thinkingLevel"] = "high"
+        }
       }
     }
   }
diff --git a/packages/opencode/src/server/routes/global.ts b/packages/opencode/src/server/routes/global.ts
index de58b5351..264f5362a 100644
--- a/packages/opencode/src/server/routes/global.ts
+++ b/packages/opencode/src/server/routes/global.ts
@@ -77,13 +77,33 @@
         },
       }),
     })
-    async function handler(event: {
+    // Queue events to prevent backpressure when they arrive faster than
+    // the SSE stream can flush (upstream #18259 by James Long)
+    const queue: Array<{
+      directory?: string
+      payload: { type: string; properties: Record<string, any> }
+    }> = []
+    let flushing = false
+    async function flush() {
+      if (flushing) return
+      flushing = true
+      while (queue.length > 0) {
+        const event = queue.shift()!
+        try {
+          await stream.writeSSE({ data: JSON.stringify(event) })
+        } catch {
+          // Client disconnected — stop flushing
+          break
+        }
+      }
+      flushing = false
+    }
+    function handler(event: {
       directory?: string
       payload: { type: string; properties: Record<string, any> }
     }) {
-      await stream.writeSSE({
-        data: JSON.stringify(event),
-      })
+      queue.push(event)
+      flush()
     }
 
     GlobalBus.on("event", handler)
diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts
index effee565e..4b3177d00 100644
--- a/packages/opencode/src/session/message-v2.ts
+++ b/packages/opencode/src/session/message-v2.ts
@@ -1091,7 +1091,8 @@
           { cause: e },
         ).toObject()
       case e instanceof Error:
-        return new NamedError.Unknown({ message: e.toString() }, { cause: e }).toObject()
+        // Use e.message instead of e.toString() to preserve tagged error messages (upstream #18165 by Kit Langton)
+        return new NamedError.Unknown({ message: e.message }, { cause: e }).toObject()
       default:
         try {
           const parsed = ProviderError.parseStreamError(e)
diff --git a/packages/opencode/test/cli/cmd/tui/prompt-part.test.ts b/packages/opencode/test/cli/cmd/tui/prompt-part.test.ts
new file mode 100644
index 000000000..326d3e624
--- /dev/null
+++ b/packages/opencode/test/cli/cmd/tui/prompt-part.test.ts
@@ -0,0 +1,47 @@
+import { describe, expect, test } from "bun:test"
+import type { PromptInfo } from "../../../../src/cli/cmd/tui/component/prompt/history"
+import { assign, strip } from "../../../../src/cli/cmd/tui/component/prompt/part"
+
+describe("prompt part", () => {
+  test("strip removes persisted ids from reused file parts", () => {
+    const part = {
+      id: "prt_old",
+      sessionID: "ses_old",
+      messageID: "msg_old",
+      type: "file" as const,
+      mime: "image/png",
+      filename: "tiny.png",
+      url: "data:image/png;base64,abc",
+    }
+
+    expect(strip(part)).toEqual({
+      type: "file",
+      mime: "image/png",
+      filename: "tiny.png",
+      url: "data:image/png;base64,abc",
+    })
+  })
+
+  test("assign overwrites stale runtime ids", () => {
+    const part = {
+      id: "prt_old",
+      sessionID: "ses_old",
+      messageID: "msg_old",
+      type: "file" as const,
+      mime: "image/png",
+      filename: "tiny.png",
+      url: "data:image/png;base64,abc",
+    } as PromptInfo["parts"][number]
+
+    const next = assign(part)
+
+    expect(next.id).not.toBe("prt_old")
+    expect(next.id.startsWith("prt_")).toBe(true)
+    expect(next).toMatchObject({
+      type: "file",
+      mime: "image/png",
+      filename: "tiny.png",
+      url: "data:image/png;base64,abc",
+    })
+  })
+})