Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 10 additions & 8 deletions src/extension/byok/vscode-node/anthropicProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -161,14 +161,16 @@ export class AnthropicLMProvider implements BYOKModelProvider<LanguageModelChatI
messages: anthropicMessagesToRawMessagesForLogging(convertedMessages, system),
ourRequestId: requestId,
location: ChatLocation.Other,
tools: options.tools?.map((tool): OpenAiFunctionTool => ({
type: 'function',
function: {
name: tool.name,
description: tool.description,
parameters: tool.inputSchema
}
})),
body: {
tools: options.tools?.map((tool): OpenAiFunctionTool => ({
type: 'function',
function: {
name: tool.name,
description: tool.description,
parameters: tool.inputSchema
}
}))
},
Comment on lines +164 to +173
Copy link

Copilot AI Nov 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The body object contains only the tools field, but it could carry additional request parameters that may already be present in options. Consider whether other relevant fields from the request options (e.g. max_tokens, temperature, prediction) should also be included in the body object for logging consistency. This would ensure comprehensive logging of the request parameters.

Copilot uses AI. Check for mistakes.
});

// Check if memory tool is present
Expand Down
18 changes: 10 additions & 8 deletions src/extension/byok/vscode-node/geminiNativeProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -108,14 +108,16 @@ export class GeminiNativeBYOKLMProvider implements BYOKModelProvider<LanguageMod
messages: geminiMessagesToRawMessagesForLogging(contents, systemInstruction),
ourRequestId: requestId,
location: ChatLocation.Other,
tools: options.tools?.map((tool): OpenAiFunctionTool => ({
type: 'function',
function: {
name: tool.name,
description: tool.description,
parameters: tool.inputSchema
}
})),
body: {
tools: options.tools?.map((tool): OpenAiFunctionTool => ({
type: 'function',
function: {
name: tool.name,
description: tool.description,
parameters: tool.inputSchema
}
}))
Copy link

Copilot AI Nov 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As in the Anthropic provider, the body object contains only the tools field. Consider whether other relevant fields from the request options (e.g. max_tokens, temperature, prediction) should also be included in the body object for logging consistency. This would ensure comprehensive logging of the request parameters across all providers.

Suggested change
}))
})),
max_tokens: options.maxTokens,
temperature: options.temperature,
prediction: options.prediction

Copilot uses AI. Check for mistakes.
}
});

// Convert VS Code tools to Gemini function declarations
Expand Down
2 changes: 1 addition & 1 deletion src/extension/conversation/vscode-node/feedbackReporter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ export class FeedbackReporter extends Disposable implements IFeedbackReporter {
const responseDump = this._embedCodeblock('ASSISTANT', turn.responseMessage?.message || '');
const workspaceState = await this._instantiationService.createInstance(WorkspaceStateSnapshotHelper).captureWorkspaceStateSnapshot([]);
const workspaceStateDump = this._embedCodeblock('WORKSPACE STATE', JSON.stringify(workspaceState, null, 2));
const toolsDump = params?.tools ? this._embedCodeblock('TOOLS', JSON.stringify(params.tools, null, 2)) : '';
const toolsDump = params?.body?.tools ? this._embedCodeblock('TOOLS', JSON.stringify(params.body.tools, null, 2)) : '';
const metadata = this._embedCodeblock('METADATA', `requestID: ${turn.id}\nmodel: ${params?.model}`);
const edits = (await this._editLogService.getEditLog(turn.id))?.map((edit, i) => {
return this._embedCodeblock(`EDIT ${i + 1}`, JSON.stringify(edit, null, 2));
Expand Down
2 changes: 0 additions & 2 deletions src/extension/prompt/node/chatMLFetcher.ts
Original file line number Diff line number Diff line change
Expand Up @@ -131,9 +131,7 @@ export class ChatMLFetcherImpl extends AbstractChatMLFetcher {
model: chatEndpoint.model,
ourRequestId,
location: opts.location,
postOptions,
body: requestBody,
tools: requestBody.tools,
ignoreStatefulMarker: opts.ignoreStatefulMarker
});
let tokenCount = -1;
Expand Down
30 changes: 7 additions & 23 deletions src/extension/prompt/vscode-node/requestLoggerImpl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import { ILogService } from '../../../platform/log/common/logService';
import { messageToMarkdown } from '../../../platform/log/common/messageStringify';
import { IResponseDelta } from '../../../platform/networking/common/fetch';
import { IEndpointBody } from '../../../platform/networking/common/networking';
import { AbstractRequestLogger, ChatRequestScheme, ILoggedElementInfo, ILoggedPendingRequest, ILoggedRequestInfo, ILoggedToolCall, LoggedInfo, LoggedInfoKind, LoggedRequest, LoggedRequestKind } from '../../../platform/requestLogger/node/requestLogger';
import { AbstractRequestLogger, ChatRequestScheme, ILoggedElementInfo, ILoggedRequestInfo, ILoggedToolCall, LoggedInfo, LoggedInfoKind, LoggedRequest, LoggedRequestKind } from '../../../platform/requestLogger/node/requestLogger';
import { ThinkingData } from '../../../platform/thinking/common/thinking';
import { createFencedCodeBlock } from '../../../util/common/markdown';
import { assertNever } from '../../../util/vs/base/common/assert';
Expand Down Expand Up @@ -106,21 +106,6 @@ class LoggedRequestInfo implements ILoggedRequestInfo {
};
}

// Extract prediction and tools like _renderRequestToMarkdown does
let prediction: string | undefined;
let tools;
const postOptions = this.entry.chatParams.postOptions && { ...this.entry.chatParams.postOptions };
if (typeof postOptions?.prediction?.content === 'string') {
prediction = postOptions.prediction.content;
postOptions.prediction = undefined;
}
if ((this.entry.chatParams as ILoggedPendingRequest).tools) {
tools = (this.entry.chatParams as ILoggedPendingRequest).tools;
if (postOptions) {
postOptions.tools = undefined;
}
}

// Handle stateful marker like _renderRequestToMarkdown does
let lastResponseId: { marker: string; modelId: string } | undefined;
if (!this.entry.chatParams.ignoreStatefulMarker) {
Expand Down Expand Up @@ -167,9 +152,8 @@ class LoggedRequestInfo implements ILoggedRequestInfo {
this.entry.chatEndpoint.urlOrRequestMetadata?.type : undefined,
model: this.entry.chatParams.model,
maxPromptTokens: this.entry.chatEndpoint.modelMaxPromptTokens,
maxResponseTokens: this.entry.chatParams.postOptions?.max_tokens,
maxResponseTokens: this.entry.chatParams.body?.max_tokens,
location: this.entry.chatParams.location,
postOptions: postOptions,
reasoning: this.entry.chatParams.body?.reasoning,
intent: this.entry.chatParams.intent,
startTime: this.entry.startTime?.toISOString(),
Expand All @@ -182,12 +166,12 @@ class LoggedRequestInfo implements ILoggedRequestInfo {
serverRequestId: this.entry.type === LoggedRequestKind.ChatMLSuccess || this.entry.type === LoggedRequestKind.ChatMLFailure ? this.entry.result.serverRequestId : undefined,
timeToFirstToken: this.entry.type === LoggedRequestKind.ChatMLSuccess ? this.entry.timeToFirstToken : undefined,
usage: this.entry.type === LoggedRequestKind.ChatMLSuccess ? this.entry.usage : undefined,
tools: tools,
tools: this.entry.chatParams.body?.tools,
};

const requestMessages = {
messages: this.entry.chatParams.messages,
prediction: prediction
prediction: this.entry.chatParams.body?.prediction
};

const response = responseData || errorInfo ? {
Expand Down Expand Up @@ -573,7 +557,7 @@ export class RequestLogger extends AbstractRequestLogger {
}
result.push(`model : ${entry.chatParams.model}`);
result.push(`maxPromptTokens : ${entry.chatEndpoint.modelMaxPromptTokens}`);
result.push(`maxResponseTokens: ${entry.chatParams.postOptions?.max_tokens}`);
result.push(`maxResponseTokens: ${entry.chatParams.body?.max_tokens}`);
result.push(`location : ${entry.chatParams.location}`);
result.push(`otherOptions : ${JSON.stringify(otherOptions)}`);
if (entry.chatParams.body?.reasoning) {
Expand Down Expand Up @@ -606,8 +590,8 @@ export class RequestLogger extends AbstractRequestLogger {
result.push(`requestId : ${entry.result.requestId}`);
result.push(`serverRequestId : ${entry.result.serverRequestId}`);
}
if (entry.chatParams.tools) {
result.push(`tools : ${JSON.stringify(entry.chatParams.tools, undefined, 4)}`);
if (entry.chatParams.body?.tools) {
result.push(`tools : ${JSON.stringify(entry.chatParams.body.tools, undefined, 4)}`);
}
result.push(`~~~`);

Expand Down
5 changes: 3 additions & 2 deletions src/platform/endpoint/vscode-node/extChatEndpoint.ts
Original file line number Diff line number Diff line change
Expand Up @@ -178,8 +178,9 @@ export class ExtensionContributedChatEndpoint implements IChatEndpoint {
model: this.model,
ourRequestId,
location,
postOptions: requestOptions,
tools: requestOptions?.tools,
body: {
...requestOptions
},
ignoreStatefulMarker: true
})
: undefined;
Expand Down
3 changes: 1 addition & 2 deletions src/platform/requestLogger/node/requestLogger.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import { HTMLTracer, IChatEndpointInfo, Raw, RenderPromptResult } from '@vscode/
import { AsyncLocalStorage } from 'async_hooks';
import type { Event } from 'vscode';
import { ChatFetchError, ChatFetchResponseType, ChatLocation, ChatResponses, FetchSuccess } from '../../../platform/chat/common/commonTypes';
import { IResponseDelta, OpenAiFunctionTool, OpenAiResponsesFunctionTool, OptionalChatRequestParams } from '../../../platform/networking/common/fetch';
import { IResponseDelta, OptionalChatRequestParams } from '../../../platform/networking/common/fetch';
import { IChatEndpoint, IEndpointBody } from '../../../platform/networking/common/networking';
import { createServiceIdentifier } from '../../../util/common/services';
import { Disposable } from '../../../util/vs/base/common/lifecycle';
Expand Down Expand Up @@ -128,7 +128,6 @@ export interface ILoggedToolCall {

export interface ILoggedPendingRequest {
messages: Raw.ChatMessage[];
tools: (OpenAiFunctionTool | OpenAiResponsesFunctionTool)[] | undefined;
ourRequestId: string;
model: string;
location: ChatLocation;
Expand Down