Commit 8a8a982

Merge pull request #384 from voideditor/model-selection
Model selection
2 parents 5fde919 + 7f890b9

2 files changed: +28 −9 lines changed

src/vs/workbench/contrib/chat/browser/actions/chatActions.ts

Lines changed: 13 additions & 9 deletions

```diff
@@ -654,10 +654,12 @@ const defaultChat = {
 	submenu: MenuId.ChatTitleBarMenu,
 	title: localize('title4', "Copilot"),
 	icon: Codicon.copilot,
-	when: ContextKeyExpr.and(
-		ChatContextKeys.supported,
-		ContextKeyExpr.has('config.chat.commandCenter.enabled')
-	),
+	// Void commented this out - copilot head
+	when: ContextKeyExpr.false(),
+	// when: ContextKeyExpr.and(
+	// 	ChatContextKeys.supported,
+	// 	ContextKeyExpr.has('config.chat.commandCenter.enabled')
+	// ),
 	order: 10001 // to the right of command center
 });
 
@@ -667,11 +669,13 @@ MenuRegistry.appendMenuItem(MenuId.TitleBar, {
 	title: localize('title4', "Copilot"),
 	group: 'navigation',
 	icon: Codicon.copilot,
-	when: ContextKeyExpr.and(
-		ChatContextKeys.supported,
-		ContextKeyExpr.has('config.chat.commandCenter.enabled'),
-		ContextKeyExpr.has('config.window.commandCenter').negate(),
-	),
+	when: ContextKeyExpr.false(),
+	// Void commented this out - copilot head
+	// when: ContextKeyExpr.and(
+	// 	ChatContextKeys.supported,
+	// 	ContextKeyExpr.has('config.chat.commandCenter.enabled'),
+	// 	ContextKeyExpr.has('config.window.commandCenter').negate(),
+	// ),
 	order: 1
 }); */
 
```
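
The change here is a visibility kill-switch rather than a deletion: each Copilot title-bar contribution keeps its registration, but its `when` clause is swapped for a constant-false context-key expression, so the item can never become visible. A minimal sketch of the pattern, using the same `MenuRegistry`/`ContextKeyExpr` APIs as the diff and assuming the standard import paths in this tree (the command id below is hypothetical):

```typescript
import { MenuId, MenuRegistry } from 'vs/platform/actions/common/actions';
import { ContextKeyExpr } from 'vs/platform/contextkey/common/contextkey';

// Registered as usual, but `when: ContextKeyExpr.false()` can never be
// satisfied, so the workbench never renders this title-bar item.
MenuRegistry.appendMenuItem(MenuId.TitleBar, {
	command: { id: 'void.exampleHiddenAction', title: 'Hidden' }, // hypothetical id
	group: 'navigation',
	when: ContextKeyExpr.false(),
	order: 1,
});
```

Keeping the original expression commented out alongside, as the diff does, makes the upstream behavior easy to restore on a future merge.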

src/vs/workbench/contrib/void/browser/chatThreadService.ts

Lines changed: 15 additions & 0 deletions

```diff
@@ -661,6 +661,7 @@ class ChatThreadService extends Disposable implements IChatThreadService {
 		// above just defines helpers, below starts the actual function
 		const { chatMode } = this._settingsService.state.globalSettings // should not change as we loop even if user changes it, so it goes here
 
+		console.log('a', chatMode)
 		// clear any previous error
 		this._setStreamState(threadId, { error: undefined }, 'set')
 
```
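
The inline comment on `chatMode` is doing real work: destructuring before the loop snapshots the value, so a user who changes the setting mid-run cannot alter the mode of an agent loop that is already in flight. A tiny self-contained illustration (values hypothetical):

```typescript
let globalSettings = { chatMode: 'agent' };

const { chatMode } = globalSettings;    // snapshot taken before the loop starts
globalSettings = { chatMode: 'chat' };  // user flips the setting mid-run

console.log(chatMode);                  // still 'agent' for this run
console.log(globalSettings.chatMode);   // a fresh read would see 'chat'
```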

```diff
@@ -669,11 +670,13 @@ class ChatThreadService extends Disposable implements IChatThreadService {
 		let isRunningWhenEnd: IsRunningType = undefined
 		let aborted = false
 
+		console.log('b')
 		// before enter loop, call tool
 		if (callThisToolFirst) {
 			const { interrupted } = await this._runToolCall(threadId, callThisToolFirst.name, { preapproved: true, validatedParams: callThisToolFirst.params })
 			if (interrupted) return
 		}
+		console.log('c')
 
 		// tool use loop
 		while (shouldSendAnotherMessage) {
```
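
Together with the surrounding context lines, the hunks outline the agent loop these checkpoints trace: optionally run a pre-approved tool, then repeatedly send an LLM message, wait for it to finish, and run any tool it requested. A stripped-down paraphrase of that control flow, with `sendMessageAndWait` and `runTool` as stand-ins for the service's real helpers:

```typescript
type ToolCall = { name: string; rawParams: unknown };

// Paraphrased control flow of the tool-use loop, per the diff context.
async function agentLoop(
	sendMessageAndWait: () => Promise<ToolCall | undefined>, // one streamed LLM turn
	runTool: (t: ToolCall) => Promise<{ interrupted: boolean }>,
): Promise<void> {
	let shouldSendAnotherMessage = true;
	while (shouldSendAnotherMessage) {
		shouldSendAnotherMessage = false;
		const toolCall = await sendMessageAndWait(); // wait for message to complete
		if (!toolCall) break;                        // no tool requested: loop ends
		const { interrupted } = await runTool(toolCall);
		if (interrupted) return;                     // tool interruption ends the run
		shouldSendAnotherMessage = true;             // tool result feeds the next turn
	}
	// afterwards: clear isRunning, add a user checkpoint, capture metrics
}
```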
```diff
@@ -685,14 +688,17 @@ class ChatThreadService extends Disposable implements IChatThreadService {
 			let resMessageIsDonePromise: (toolCall?: RawToolCallObj | undefined) => void // resolves when user approves this tool use (or if tool doesn't require approval)
 			const messageIsDonePromise = new Promise<RawToolCallObj | undefined>((res, rej) => { resMessageIsDonePromise = res })
 
+			console.log('d')
 			// send llm message
 			this._setStreamState(threadId, { isRunning: 'LLM' }, 'merge')
 			const systemMessage = await this._generateSystemMessage(chatMode)
+			console.log('e0')
 			const llmMessages = await this._generateLLMMessages(threadId)
 			const messages: LLMChatMessage[] = [
 				{ role: 'system', content: systemMessage },
 				...llmMessages
 			]
+			console.log('e')
 
 			const llmCancelToken = this._llmMessageService.sendLLMMessage({
 				messagesType: 'chatMessages',
```
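
The `resMessageIsDonePromise` pair is the classic "deferred" pattern: the promise's resolver is captured so code outside the executor, here the LLM stream's completion callback or the tool-approval handler, can settle it later while the loop simply awaits it. A self-contained sketch (names illustrative):

```typescript
// Minimal deferred: hand the resolver to whoever finishes the work.
function deferred<T>(): { promise: Promise<T>; resolve: (value: T) => void } {
	let resolve!: (value: T) => void;
	const promise = new Promise<T>((res) => { resolve = res; });
	return { promise, resolve };
}

const messageIsDone = deferred<string | undefined>();
// e.g. an on-final-message callback would resolve it when streaming ends:
setTimeout(() => messageIsDone.resolve(undefined), 50);
messageIsDone.promise.then(() => console.log('message done')); // the loop awaits this
```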
```diff
@@ -734,14 +740,20 @@ class ChatThreadService extends Disposable implements IChatThreadService {
 				break
 			}
 			this._setStreamState(threadId, { streamingToken: llmCancelToken }, 'merge') // new stream token for the new message
+			console.log('waiting...')
 			const toolCall = await messageIsDonePromise // wait for message to complete
+			console.log('done!')
 			if (aborted) { return }
+			console.log('H')
 			this._setStreamState(threadId, { streamingToken: undefined }, 'merge') // streaming message is done
+			console.log('I')
 
 			// call tool if there is one
 			const tool: RawToolCallObj | undefined = toolCall
 			if (tool) {
+				console.log('J')
 				const { awaitingUserApproval, interrupted } = await this._runToolCall(threadId, tool.name, { preapproved: false, unvalidatedToolParams: tool.rawParams })
+				console.log('K')
 
 				// stop if interrupted. we don't have to do this for llmMessage because we have a stream token for it and onAbort gets called, but we don't have the equivalent for tools.
 				// just detect tool interruption which is the same as chat interruption right now
@@ -756,14 +768,17 @@ class ChatThreadService extends Disposable implements IChatThreadService {
 			}
 
 		} // end while
+		console.log('L')
 
 
 		// if awaiting user approval, keep isRunning true, else end isRunning
 		this._setStreamState(threadId, { isRunning: isRunningWhenEnd }, 'merge')
+		console.log('M')
 
 		// add checkpoint before the next user message
 		if (!isRunningWhenEnd)
 			this._addUserCheckpoint({ threadId })
+		console.log('N')
 
 		// capture number of messages sent
 		this._metricsService.capture('Agent Loop Done', { nMessagesSent, chatMode })
```
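
All fifteen additions are bare `console.log` checkpoints ('a' through 'N', plus 'waiting...'/'done!'), which reads as temporary tracing added while debugging model selection. If checkpoints like these were meant to stay, gating them behind a flag would keep the noise out of production logs; this is a hypothetical refinement, not part of the commit:

```typescript
// Hypothetical trace helper (not in the commit): the same checkpoints,
// but switchable so they can remain in the code without shipping noise.
const DEBUG_AGENT_LOOP = false;

function trace(tag: string, ...args: unknown[]): void {
	if (DEBUG_AGENT_LOOP) {
		console.log(`[agent-loop] ${tag}`, ...args);
	}
}

trace('a', { chatMode: 'agent' }); // would replace console.log('a', chatMode)
```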
