File tree: 4 files changed, +15 −8 lines changed
Original file line number Diff line number Diff line change @@ -599,5 +599,10 @@ export async function CodexAuthPlugin(input: PluginInput): Promise<Hooks> {
599599 output . headers [ "User-Agent" ] = `opencode/${ Installation . VERSION } (${ os . platform ( ) } ${ os . release ( ) } ; ${ os . arch ( ) } )`
600600 output . headers . session_id = input . sessionID
601601 } ,
602+ "chat.params" : async ( input , output ) => {
603+ if ( input . model . providerID !== "openai" ) return
604+ // Match codex cli
605+ output . maxOutputTokens = undefined
606+ } ,
602607 }
603608}
Original file line number Diff line number Diff line change @@ -309,6 +309,14 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
309309 } ,
310310 ] ,
311311 } ,
312+ "chat.params" : async ( incoming , output ) => {
313+ if ( ! incoming . model . providerID . includes ( "github-copilot" ) ) return
314+
315+ // Match github copilot cli, omit maxOutputTokens for gpt models
316+ if ( incoming . model . api . id . includes ( "gpt" ) ) {
317+ output . maxOutputTokens = undefined
318+ }
319+ } ,
312320 "chat.headers" : async ( incoming , output ) => {
313321 if ( ! incoming . model . providerID . includes ( "github-copilot" ) ) return
314322
Original file line number Diff line number Diff line change @@ -160,11 +160,6 @@ export namespace LLM {
160160 ...input . messages ,
161161 ]
162162
163- const maxOutputTokens =
164- isOpenaiOauth || provider . id . includes ( "github-copilot" )
165- ? undefined
166- : ProviderTransform . maxOutputTokens ( input . model )
167-
168163 const params = await Plugin . trigger (
169164 "chat.params" ,
170165 {
@@ -180,7 +175,7 @@ export namespace LLM {
180175 : undefined ,
181176 topP : input . agent . topP ?? ProviderTransform . topP ( input . model ) ,
182177 topK : ProviderTransform . topK ( input . model ) ,
183- maxOutputTokens,
178+ maxOutputTokens : ProviderTransform . maxOutputTokens ( input . model ) ,
184179 options,
185180 } ,
186181 )
Original file line number Diff line number Diff line change @@ -743,8 +743,7 @@ describe("session.llm.stream", () => {
743743 expect ( ( body . reasoning as { effort ?: string } | undefined ) ?. effort ) . toBe ( "high" )
744744
745745 const maxTokens = body . max_output_tokens as number | undefined
746- const expectedMaxTokens = ProviderTransform . maxOutputTokens ( resolved )
747- expect ( maxTokens ) . toBe ( expectedMaxTokens )
746+ expect ( maxTokens ) . toBe ( undefined ) // match codex cli behavior
748747 } ,
749748 } )
750749 } )
You can’t perform that action at this time.
0 commit comments