10 changes: 5 additions & 5 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -4478,7 +4478,7 @@
"monaco-editor": "0.44.0",
"npm-run-all": "^4.1.5",
"open": "^10.1.2",
"openai": "^5.11.0",
"openai": "^6.7.0",
"outdent": "^0.8.0",
"picomatch": "^4.0.2",
"playwright": "^1.56.1",
17 changes: 16 additions & 1 deletion src/extension/externalAgents/node/oaiLanguageModelServer.ts
@@ -7,7 +7,7 @@ import { RequestMetadata } from '@vscode/copilot-api';
import { Raw } from '@vscode/prompt-tsx';
import * as http from 'http';
import { ClientHttp2Stream } from 'http2';
- import OpenAI from 'openai';
+ import type OpenAI from 'openai';
import { IChatMLFetcher, Source } from '../../../platform/chat/common/chatMLFetcher';
import { ChatLocation, ChatResponse } from '../../../platform/chat/common/commonTypes';
import { CustomModel, EndpointEditToolName, IEndpointProvider } from '../../../platform/endpoint/common/endpointProvider';
@@ -132,6 +132,16 @@ export class OpenAILanguageModelServer extends Disposable {

try {
const requestBody: OpenAI.Responses.ResponseCreateParams = JSON.parse(bodyString);
+ if (Array.isArray(requestBody.tools)) {
+ requestBody.tools = requestBody.tools.filter(tool => {
+ if (typeof tool?.type === 'string' && tool.type.startsWith('web_search')) {
+ this.warn(`Filtering out unsupported tool type: ${JSON.stringify(tool)}`);
+ return false;
+ }
+
+ return true;
+ });
+ }
const lastMessage = requestBody.input?.at(-1);
const isUserInitiatedMessage = typeof lastMessage === 'string' ||
lastMessage?.type === 'message' && lastMessage.role === 'user';
@@ -274,6 +284,11 @@ export class OpenAILanguageModelServer extends Disposable {
const messageWithClassName = `[OpenAILanguageModelServer] ${message}`;
this.logService.trace(messageWithClassName);
}

+ private warn(message: string): void {
+ const messageWithClassName = `[OpenAILanguageModelServer] ${message}`;
+ this.logService.warn(messageWithClassName);
+ }
}

class StreamingPassThroughEndpoint implements IChatEndpoint {
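For illustration, a standalone sketch of the new tool filtering above (the request body here is hypothetical and the tool shapes are simplified stand-ins for OpenAI.Responses.ResponseCreateParams, not the SDK's actual types):

// Any tool whose type starts with 'web_search' (e.g. 'web_search_preview')
// is stripped before the request is forwarded; everything else passes through.
const requestBody = {
	model: 'gpt-5',
	input: 'Summarize this repo',
	tools: [
		{ type: 'function', name: 'read_file', parameters: {} },
		{ type: 'web_search_preview' },
	],
};

// Same predicate as the PR's filter, minus the class's warn() logging.
requestBody.tools = requestBody.tools.filter(tool =>
	!(typeof tool?.type === 'string' && tool.type.startsWith('web_search')));

console.log(requestBody.tools.map(t => t.type)); // ['function']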
50 changes: 44 additions & 6 deletions src/platform/endpoint/node/responsesApi.ts
@@ -251,14 +251,18 @@ export function responseApiInputToRawMessagesForLogging(body: OpenAI.Responses.R
}
});
break;
- case 'function_call_output':
+ case 'function_call_output': {
flushPendingFunctionCalls();
- messages.push({
- role: Raw.ChatRole.Tool,
- content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: item.output }],
- toolCallId: item.call_id
- });
+ if (isResponseFunctionCallOutputItem(item)) {
+ const content = responseFunctionOutputToRawContents(item.output);
+ messages.push({
+ role: Raw.ChatRole.Tool,
+ content,
+ toolCallId: item.call_id
+ });
+ }
Comment on lines +256 to +263

Copilot AI Nov 1, 2025

The type guard check on line 256 is redundant. Since we're already inside a case 'function_call_output': block (line 254), the item type is already narrowed to have type: 'function_call_output'. The type guard isResponseFunctionCallOutputItem performs the same check again. Consider removing the conditional wrapper and calling responseFunctionOutputToRawContents directly.

Suggested change
- if (isResponseFunctionCallOutputItem(item)) {
- const content = responseFunctionOutputToRawContents(item.output);
- messages.push({
- role: Raw.ChatRole.Tool,
- content,
- toolCallId: item.call_id
- });
- }
+ const content = responseFunctionOutputToRawContents(item.output);
+ messages.push({
+ role: Raw.ChatRole.Tool,
+ content,
+ toolCallId: item.call_id
+ });

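For context, a minimal sketch of the narrowing behavior this comment relies on (the InputItem type here is a hypothetical stand-in, not the SDK's ResponseInputItem): inside a case of a switch over a discriminated union, TypeScript has already narrowed the variant, so a guard that re-checks the discriminant proves nothing new.

// Hypothetical discriminated union, standing in for the SDK type.
type InputItem =
	| { type: 'message'; role: string; content: string }
	| { type: 'function_call_output'; call_id: string; output: string };

function describe(item: InputItem): string {
	switch (item.type) {
		case 'function_call_output':
			// `item` is already narrowed here: `call_id` and `output`
			// type-check without any extra guard.
			return `tool result for ${item.call_id}: ${item.output}`;
		case 'message':
			return `${item.role}: ${item.content}`;
	}
}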
break;
+ }
case 'reasoning':
// We can't perfectly reconstruct the original thinking data
// but we can add a placeholder for logging
@@ -295,6 +299,10 @@ function isResponseInputItemMessage(item: OpenAI.Responses.ResponseInputItem): i
return 'role' in item && item.role === 'assistant' && (!('type' in item) || item.type !== 'message');
}

+ function isResponseFunctionCallOutputItem(item: OpenAI.Responses.ResponseInputItem): item is OpenAI.Responses.ResponseInputItem.FunctionCallOutput {
+ return 'type' in item && item.type === 'function_call_output';
+ }

function ensureContentArray(content: string | OpenAI.Responses.ResponseInputMessageContentList): OpenAI.Responses.ResponseInputMessageContentList {
if (typeof content === 'string') {
return [{ type: 'input_text', text: content }];
@@ -329,6 +337,36 @@ function responseOutputToRawContent(part: OpenAI.Responses.ResponseOutputText |
}
}

+ function responseFunctionOutputItemToRawContent(part: OpenAI.Responses.ResponseFunctionCallOutputItem): Raw.ChatCompletionContentPart | undefined {
+ if (part.type === 'input_text') {
+ return { type: Raw.ChatCompletionContentPartKind.Text, text: part.text };
+ }
+ if (part.type === 'input_image') {
+ const detail = part.detail && part.detail !== 'auto' ? part.detail : undefined;
+ return {
+ type: Raw.ChatCompletionContentPartKind.Image,
+ imageUrl: {
+ url: part.image_url || '',
+ detail
+ }
+ };
+ }
+ if (part.type === 'input_file') {
+ return {
+ type: Raw.ChatCompletionContentPartKind.Opaque,
+ value: `[File Output - Filename: ${part.filename || 'unknown'}]`
+ };
+ }
+ return undefined;
+ }
Comment on lines +340 to +361

Copilot AI Nov 1, 2025

This function duplicates logic from responseContentToRawContent (lines 313-329). Both functions handle input_text, input_image, and input_file types identically. Consider extracting the common conversion logic into a shared function or reusing responseContentToRawContent if the types are compatible, to maintain DRY principles and reduce maintenance burden.

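For illustration, a minimal sketch of the extraction this comment suggests, assuming the input-part shapes accepted by the two functions are compatible (the InputPart type and the helper name are hypothetical, not code from this PR; `Raw` is the '@vscode/prompt-tsx' import already used in the file):

// Hypothetical shared converter that both responseContentToRawContent and
// responseFunctionOutputItemToRawContent could delegate to.
type InputPart =
	| { type: 'input_text'; text: string }
	| { type: 'input_image'; image_url?: string; detail?: 'auto' | 'low' | 'high' }
	| { type: 'input_file'; filename?: string };

function inputPartToRawContent(part: InputPart): Raw.ChatCompletionContentPart | undefined {
	switch (part.type) {
		case 'input_text':
			return { type: Raw.ChatCompletionContentPartKind.Text, text: part.text };
		case 'input_image':
			return {
				type: Raw.ChatCompletionContentPartKind.Image,
				// Drop 'auto' so the default detail stays unset, matching the PR's logic.
				imageUrl: { url: part.image_url || '', detail: part.detail && part.detail !== 'auto' ? part.detail : undefined }
			};
		case 'input_file':
			return { type: Raw.ChatCompletionContentPartKind.Opaque, value: `[File Output - Filename: ${part.filename || 'unknown'}]` };
		default:
			return undefined;
	}
}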

+ function responseFunctionOutputToRawContents(output: string | OpenAI.Responses.ResponseFunctionCallOutputItemList): Raw.ChatCompletionContentPart[] {
+ if (typeof output === 'string') {
+ return [{ type: Raw.ChatCompletionContentPartKind.Text, text: output }];
+ }
+ return coalesce(output.map(responseFunctionOutputItemToRawContent));
+ }
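A quick usage sketch of the new conversion path (the sample output list is hypothetical and its shapes simplified, so it may need `as const` or explicit typing against the real SDK types):

// Before this change, item.output was assumed to be a plain string; a list
// output like this one can now be logged as text and image parts.
const parts = responseFunctionOutputToRawContents([
	{ type: 'input_text', text: 'Found 3 matches' },
	{ type: 'input_image', image_url: 'https://example.com/shot.png', detail: 'low' },
]);
// parts: [Text('Found 3 matches'), Image({ url: 'https://example.com/shot.png', detail: 'low' })]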

export async function processResponseFromChatEndpoint(instantiationService: IInstantiationService, telemetryService: ITelemetryService, logService: ILogService, response: Response, expectedNumChoices: number, finishCallback: FinishedCallback, telemetryData: TelemetryData): Promise<AsyncIterableObject<ChatCompletion>> {
const body = (await response.body()) as ClientHttp2Stream;
return new AsyncIterableObject<ChatCompletion>(async feed => {