Skip to content

Commit c05e4cc

Browse files
rekram1-node and hugojosefson
authored and committed
tweak: move the max token exclusions to plugins @rekram1-node (anomalyco#21225)
1 parent 8db28eb commit c05e4cc

File tree

4 files changed

+15
-8
lines changed

4 files changed

+15
-8
lines changed

packages/opencode/src/plugin/codex.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -599,5 +599,10 @@ export async function CodexAuthPlugin(input: PluginInput): Promise<Hooks> {
599599
output.headers["User-Agent"] = `opencode/${Installation.VERSION} (${os.platform()} ${os.release()}; ${os.arch()})`
600600
output.headers.session_id = input.sessionID
601601
},
602+
"chat.params": async (input, output) => {
603+
if (input.model.providerID !== "openai") return
604+
// Match codex cli
605+
output.maxOutputTokens = undefined
606+
},
602607
}
603608
}

packages/opencode/src/plugin/github-copilot/copilot.ts

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -309,6 +309,14 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
309309
},
310310
],
311311
},
312+
"chat.params": async (incoming, output) => {
313+
if (!incoming.model.providerID.includes("github-copilot")) return
314+
315+
// Match github copilot cli, omit maxOutputTokens for gpt models
316+
if (incoming.model.api.id.includes("gpt")) {
317+
output.maxOutputTokens = undefined
318+
}
319+
},
312320
"chat.headers": async (incoming, output) => {
313321
if (!incoming.model.providerID.includes("github-copilot")) return
314322

packages/opencode/src/session/llm.ts

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -160,11 +160,6 @@ export namespace LLM {
160160
...input.messages,
161161
]
162162

163-
const maxOutputTokens =
164-
isOpenaiOauth || provider.id.includes("github-copilot")
165-
? undefined
166-
: ProviderTransform.maxOutputTokens(input.model)
167-
168163
const params = await Plugin.trigger(
169164
"chat.params",
170165
{
@@ -180,7 +175,7 @@ export namespace LLM {
180175
: undefined,
181176
topP: input.agent.topP ?? ProviderTransform.topP(input.model),
182177
topK: ProviderTransform.topK(input.model),
183-
maxOutputTokens,
178+
maxOutputTokens: ProviderTransform.maxOutputTokens(input.model),
184179
options,
185180
},
186181
)

packages/opencode/test/session/llm.test.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -743,8 +743,7 @@ describe("session.llm.stream", () => {
743743
expect((body.reasoning as { effort?: string } | undefined)?.effort).toBe("high")
744744

745745
const maxTokens = body.max_output_tokens as number | undefined
746-
const expectedMaxTokens = ProviderTransform.maxOutputTokens(resolved)
747-
expect(maxTokens).toBe(expectedMaxTokens)
746+
expect(maxTokens).toBe(undefined) // match codex cli behavior
748747
},
749748
})
750749
})

0 commit comments

Comments
 (0)