Skip to content

Commit e4bc21f

Browse files
Merge pull request #182 from voideditor/model-selection
UI Improvements and Model Selection
2 parents 7f8e1ff + 81b78b7 commit e4bc21f

File tree

20 files changed

+790

-277

lines changed

src/vs/editor/contrib/lineSelection/browser/lineSelection.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ export class ExpandLineSelectionAction extends EditorAction {
2222
kbOpts: {
2323
weight: KeybindingWeight.EditorCore,
2424
kbExpr: EditorContextKeys.textInputFocus,
25-
primary: KeyMod.CtrlCmd | KeyCode.KeyL
25+
primary: KeyMod.CtrlCmd | KeyCode.KeyM // Void changed this to Cmd+M
2626
},
2727
});
2828
}

src/vs/platform/void/common/llmMessageService.ts

Lines changed: 36 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
* Void Editor additions licensed under the AGPL 3.0 License.
44
*--------------------------------------------------------------------------------------------*/
55

6-
import { EventLLMMessageOnTextParams, EventLLMMessageOnErrorParams, EventLLMMessageOnFinalMessageParams, ServiceSendLLMMessageParams, MainLLMMessageParams, MainLLMMessageAbortParams, ServiceOllamaListParams, EventOllamaListOnSuccessParams, EventOllamaListOnErrorParams, MainOllamaListParams } from './llmMessageTypes.js';
6+
import { EventLLMMessageOnTextParams, EventLLMMessageOnErrorParams, EventLLMMessageOnFinalMessageParams, ServiceSendLLMMessageParams, MainLLMMessageParams, MainLLMMessageAbortParams, ServiceModelListParams, EventModelListOnSuccessParams, EventModelListOnErrorParams, MainModelListParams, OllamaModelResponse, OpenaiCompatibleModelResponse, } from './llmMessageTypes.js';
77
import { IChannel } from '../../../base/parts/ipc/common/ipc.js';
88
import { IMainProcessService } from '../../ipc/common/mainProcessService.js';
99
import { InstantiationType, registerSingleton } from '../../instantiation/common/extensions.js';
@@ -21,7 +21,8 @@ export interface ILLMMessageService {
2121
readonly _serviceBrand: undefined;
2222
sendLLMMessage: (params: ServiceSendLLMMessageParams) => string | null;
2323
abort: (requestId: string) => void;
24-
ollamaList: (params: ServiceOllamaListParams) => void;
24+
ollamaList: (params: ServiceModelListParams<OllamaModelResponse>) => void;
25+
openAICompatibleList: (params: ServiceModelListParams<OpenaiCompatibleModelResponse>) => void;
2526
}
2627

2728
export class LLMMessageService extends Disposable implements ILLMMessageService {
@@ -36,9 +37,12 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
3637

3738

3839
// ollamaList
39-
private readonly onSuccess_ollama: { [eventId: string]: ((params: EventOllamaListOnSuccessParams) => void) } = {}
40-
private readonly onError_ollama: { [eventId: string]: ((params: EventOllamaListOnErrorParams) => void) } = {}
40+
private readonly onSuccess_ollama: { [eventId: string]: ((params: EventModelListOnSuccessParams<OllamaModelResponse>) => void) } = {}
41+
private readonly onError_ollama: { [eventId: string]: ((params: EventModelListOnErrorParams<OllamaModelResponse>) => void) } = {}
4142

43+
// openAICompatibleList
44+
private readonly onSuccess_openAICompatible: { [eventId: string]: ((params: EventModelListOnSuccessParams<OpenaiCompatibleModelResponse>) => void) } = {}
45+
private readonly onError_openAICompatible: { [eventId: string]: ((params: EventModelListOnErrorParams<OpenaiCompatibleModelResponse>) => void) } = {}
4246

4347
constructor(
4448
@IMainProcessService private readonly mainProcessService: IMainProcessService, // used as a renderer (only usable on client side)
@@ -65,12 +69,19 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
6569
this._onRequestIdDone(e.requestId)
6670
}))
6771
// ollama
68-
this._register((this.channel.listen('onSuccess_ollama') satisfies Event<EventOllamaListOnSuccessParams>)(e => {
72+
this._register((this.channel.listen('onSuccess_ollama') satisfies Event<EventModelListOnSuccessParams<OllamaModelResponse>>)(e => {
6973
this.onSuccess_ollama[e.requestId]?.(e)
7074
}))
71-
this._register((this.channel.listen('onError_ollama') satisfies Event<EventOllamaListOnErrorParams>)(e => {
75+
this._register((this.channel.listen('onError_ollama') satisfies Event<EventModelListOnErrorParams<OllamaModelResponse>>)(e => {
7276
this.onError_ollama[e.requestId]?.(e)
7377
}))
78+
// openaiCompatible
79+
this._register((this.channel.listen('onSuccess_openAICompatible') satisfies Event<EventModelListOnSuccessParams<OpenaiCompatibleModelResponse>>)(e => {
80+
this.onSuccess_openAICompatible[e.requestId]?.(e)
81+
}))
82+
this._register((this.channel.listen('onError_openAICompatible') satisfies Event<EventModelListOnErrorParams<OpenaiCompatibleModelResponse>>)(e => {
83+
this.onError_openAICompatible[e.requestId]?.(e)
84+
}))
7485

7586
}
7687

@@ -113,7 +124,7 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
113124
}
114125

115126

116-
ollamaList = (params: ServiceOllamaListParams) => {
127+
ollamaList = (params: ServiceModelListParams<OllamaModelResponse>) => {
117128
const { onSuccess, onError, ...proxyParams } = params
118129

119130
const { settingsOfProvider } = this.voidSettingsService.state
@@ -127,7 +138,24 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
127138
...proxyParams,
128139
settingsOfProvider,
129140
requestId: requestId_,
130-
} satisfies MainOllamaListParams)
141+
} satisfies MainModelListParams<OllamaModelResponse>)
142+
}
143+
144+
openAICompatibleList = (params: ServiceModelListParams<OpenaiCompatibleModelResponse>) => {
145+
const { onSuccess, onError, ...proxyParams } = params
146+
147+
const { settingsOfProvider } = this.voidSettingsService.state
148+
149+
// add state for request id
150+
const requestId_ = generateUuid();
151+
this.onSuccess_openAICompatible[requestId_] = onSuccess
152+
this.onError_openAICompatible[requestId_] = onError
153+
154+
this.channel.call('openAICompatibleList', {
155+
...proxyParams,
156+
settingsOfProvider,
157+
requestId: requestId_,
158+
} satisfies MainModelListParams<OpenaiCompatibleModelResponse>)
131159
}
132160

133161

src/vs/platform/void/common/llmMessageTypes.ts

Lines changed: 21 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ export type _InternalSendLLMMessageFnType = (params: {
9797

9898

9999
// These are from 'ollama' SDK
100-
interface ModelDetails {
100+
interface OllamaModelDetails {
101101
parent_model: string;
102102
format: string;
103103
family: string;
@@ -106,35 +106,44 @@ interface ModelDetails {
106106
quantization_level: string;
107107
}
108108

109-
export type ModelResponse = {
109+
export type OllamaModelResponse = {
110110
name: string;
111111
modified_at: Date;
112112
size: number;
113113
digest: string;
114-
details: ModelDetails;
114+
details: OllamaModelDetails;
115115
expires_at: Date;
116116
size_vram: number;
117117
}
118118

119+
export type OpenaiCompatibleModelResponse = {
120+
id: string;
121+
created: number;
122+
object: 'model';
123+
owned_by: string;
124+
}
125+
119126

120127
// params to the true list fn
121-
export type OllamaListParams = {
128+
export type ModelListParams<modelResponse> = {
122129
settingsOfProvider: SettingsOfProvider;
123-
onSuccess: (param: { models: ModelResponse[] }) => void;
130+
onSuccess: (param: { models: modelResponse[] }) => void;
124131
onError: (param: { error: string }) => void;
125132
}
126133

127-
export type ServiceOllamaListParams = {
128-
onSuccess: (param: { models: ModelResponse[] }) => void;
134+
// params to the service
135+
export type ServiceModelListParams<modelResponse> = {
136+
onSuccess: (param: { models: modelResponse[] }) => void;
129137
onError: (param: { error: any }) => void;
130138
}
131139

132-
type BlockedMainOllamaListParams = 'onSuccess' | 'onError'
133-
export type MainOllamaListParams = Omit<OllamaListParams, BlockedMainOllamaListParams> & { requestId: string }
140+
type BlockedMainModelListParams = 'onSuccess' | 'onError'
141+
export type MainModelListParams<modelResponse> = Omit<ModelListParams<modelResponse>, BlockedMainModelListParams> & { requestId: string }
142+
143+
export type EventModelListOnSuccessParams<modelResponse> = Parameters<ModelListParams<modelResponse>['onSuccess']>[0] & { requestId: string }
144+
export type EventModelListOnErrorParams<modelResponse> = Parameters<ModelListParams<modelResponse>['onError']>[0] & { requestId: string }
134145

135-
export type EventOllamaListOnSuccessParams = Parameters<OllamaListParams['onSuccess']>[0] & { requestId: string }
136-
export type EventOllamaListOnErrorParams = Parameters<OllamaListParams['onError']>[0] & { requestId: string }
137146

138147

139148

140-
export type _InternalOllamaListFnType = (params: OllamaListParams) => void
149+
export type _InternalModelListFnType<modelResponse> = (params: ModelListParams<modelResponse>) => void

src/vs/platform/void/common/refreshModelService.ts

Lines changed: 122 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,37 @@ import { InstantiationType, registerSingleton } from '../../instantiation/common
88
import { IVoidSettingsService } from './voidSettingsService.js';
99
import { ILLMMessageService } from './llmMessageService.js';
1010
import { Emitter, Event } from '../../../base/common/event.js';
11-
import { Disposable } from '../../../base/common/lifecycle.js';
11+
import { Disposable, IDisposable } from '../../../base/common/lifecycle.js';
12+
import { ProviderName, SettingsOfProvider } from './voidSettingsTypes.js';
13+
import { OllamaModelResponse, OpenaiCompatibleModelResponse } from './llmMessageTypes.js';
1214

1315

14-
export type RefreshModelState = 'done' | 'loading'
16+
export const refreshableProviderNames = ['ollama', 'openAICompatible'] satisfies ProviderName[]
17+
18+
export type RefreshableProviderName = typeof refreshableProviderNames[number]
19+
20+
21+
type RefreshableState = {
22+
state: 'init',
23+
timeoutId: null,
24+
} | {
25+
state: 'refreshing',
26+
timeoutId: NodeJS.Timeout | null,
27+
} | {
28+
state: 'success',
29+
timeoutId: null,
30+
}
31+
32+
33+
export type RefreshModelStateOfProvider = Record<RefreshableProviderName, RefreshableState>
34+
35+
36+
37+
const refreshBasedOn: { [k in RefreshableProviderName]: (keyof SettingsOfProvider[k])[] } = {
38+
ollama: ['enabled', 'endpoint'],
39+
openAICompatible: ['enabled', 'endpoint', 'apiKey'],
40+
}
41+
const REFRESH_INTERVAL = 5000
1542

1643
// element-wise equals
1744
function eq<T>(a: T[], b: T[]): boolean {
@@ -23,9 +50,9 @@ function eq<T>(a: T[], b: T[]): boolean {
2350
}
2451
export interface IRefreshModelService {
2552
readonly _serviceBrand: undefined;
26-
refreshOllamaModels(): void;
27-
onDidChangeState: Event<void>;
28-
state: RefreshModelState;
53+
refreshModels: (providerName: RefreshableProviderName) => Promise<void>;
54+
onDidChangeState: Event<RefreshableProviderName>;
55+
state: RefreshModelStateOfProvider;
2956
}
3057

3158
export const IRefreshModelService = createDecorator<IRefreshModelService>('RefreshModelService');
@@ -34,71 +61,120 @@ export class RefreshModelService extends Disposable implements IRefreshModelServ
3461

3562
readonly _serviceBrand: undefined;
3663

37-
private readonly _onDidChangeState = new Emitter<void>();
38-
readonly onDidChangeState: Event<void> = this._onDidChangeState.event; // this is primarily for use in react, so react can listen + update on state changes
64+
private readonly _onDidChangeState = new Emitter<RefreshableProviderName>();
65+
readonly onDidChangeState: Event<RefreshableProviderName> = this._onDidChangeState.event; // this is primarily for use in react, so react can listen + update on state changes
3966

4067
constructor(
4168
@IVoidSettingsService private readonly voidSettingsService: IVoidSettingsService,
4269
@ILLMMessageService private readonly llmMessageService: ILLMMessageService,
4370
) {
4471
super()
4572

46-
// on mount, refresh ollama models
47-
this.refreshOllamaModels()
48-
49-
// every time ollama.enabled changes, refresh ollama models, like useEffect
50-
let relevantVals = () => [this.voidSettingsService.state.settingsOfProvider.ollama.enabled, this.voidSettingsService.state.settingsOfProvider.ollama.endpoint]
51-
let prevVals = relevantVals()
52-
this._register(
53-
this.voidSettingsService.onDidChangeState(() => { // we might want to debounce this
54-
const newVals = relevantVals()
55-
if (!eq(prevVals, newVals)) {
56-
this.refreshOllamaModels()
57-
prevVals = newVals
58-
}
59-
})
60-
)
6173

62-
}
74+
const disposables: Set<IDisposable> = new Set()
75+
76+
77+
const startRefreshing = () => {
78+
this._clearAllTimeouts()
79+
disposables.forEach(d => d.dispose())
80+
disposables.clear()
81+
82+
if (!voidSettingsService.state.featureFlagSettings.autoRefreshModels) return
6383

64-
state: RefreshModelState = 'done'
84+
for (const providerName of refreshableProviderNames) {
6585

66-
private _timeoutId: NodeJS.Timeout | null = null
67-
private _cancelTimeout = () => {
68-
if (this._timeoutId) {
69-
clearTimeout(this._timeoutId)
70-
this._timeoutId = null
86+
const refresh = () => {
87+
// const { enabled } = this.voidSettingsService.state.settingsOfProvider[providerName]
88+
this.refreshModels(providerName, { enableProviderOnSuccess: true }) // enable the provider on success
89+
}
90+
91+
refresh()
92+
93+
// every time providerName.enabled changes, refresh models too, like a useEffect
94+
let relevantVals = () => refreshBasedOn[providerName].map(settingName => this.voidSettingsService.state.settingsOfProvider[providerName][settingName])
95+
let prevVals = relevantVals() // each iteration of a for loop has its own context and vars, so this is ok
96+
disposables.add(
97+
this.voidSettingsService.onDidChangeState(() => { // we might want to debounce this
98+
const newVals = relevantVals()
99+
if (!eq(prevVals, newVals)) {
100+
refresh()
101+
prevVals = newVals
102+
}
103+
})
104+
)
105+
}
71106
}
107+
108+
// on mount (when get init settings state), and if a relevant feature flag changes (detected natively right now by refreshing if any flag changes), start refreshing models
109+
voidSettingsService.waitForInitState.then(() => {
110+
startRefreshing()
111+
this._register(
112+
voidSettingsService.onDidChangeState((type) => { if (type === 'featureFlagSettings') startRefreshing() })
113+
)
114+
})
115+
72116
}
73-
async refreshOllamaModels() {
74-
// cancel any existing poll
75-
this._cancelTimeout()
76117

77-
// if ollama is disabled, obivously done
78-
if (!this.voidSettingsService.state.settingsOfProvider.ollama.enabled) {
79-
this._setState('done')
80-
return
81-
}
118+
state: RefreshModelStateOfProvider = {
119+
ollama: { state: 'init', timeoutId: null },
120+
openAICompatible: { state: 'init', timeoutId: null },
121+
}
122+
123+
124+
// start listening for models (and don't stop until success)
125+
async refreshModels(providerName: RefreshableProviderName, options?: { enableProviderOnSuccess?: boolean }) {
126+
this._clearProviderTimeout(providerName)
82127

83128
// start loading models
84-
this._setState('loading')
129+
this._setRefreshState(providerName, 'refreshing')
130+
131+
const fn = providerName === 'ollama' ? this.llmMessageService.ollamaList
132+
: providerName === 'openAICompatible' ? this.llmMessageService.openAICompatibleList
133+
: () => { }
85134

86-
this.llmMessageService.ollamaList({
135+
fn({
87136
onSuccess: ({ models }) => {
88-
this.voidSettingsService.setDefaultModels('ollama', models.map(model => model.name))
89-
this._setState('done')
137+
this.voidSettingsService.setDefaultModels(providerName, models.map(model => {
138+
if (providerName === 'ollama') return (model as OllamaModelResponse).name
139+
else if (providerName === 'openAICompatible') return (model as OpenaiCompatibleModelResponse).id
140+
else throw new Error('refreshMode fn: unknown provider', providerName)
141+
}))
142+
143+
if (options?.enableProviderOnSuccess)
144+
this.voidSettingsService.setSettingOfProvider(providerName, 'enabled', true)
145+
146+
this._setRefreshState(providerName, 'success')
90147
},
91148
onError: ({ error }) => {
92149
// poll
93-
console.log('retrying ollamaList:', error)
94-
this._timeoutId = setTimeout(() => this.refreshOllamaModels(), 5000)
150+
console.log('retrying list models:', providerName, error)
151+
const timeoutId = setTimeout(() => this.refreshModels(providerName, options), REFRESH_INTERVAL)
152+
this._setTimeoutId(providerName, timeoutId)
95153
}
96154
})
97155
}
98156

99-
private _setState(state: RefreshModelState) {
100-
this.state = state
101-
this._onDidChangeState.fire()
157+
_clearAllTimeouts() {
158+
for (const providerName of refreshableProviderNames) {
159+
this._clearProviderTimeout(providerName)
160+
}
161+
}
162+
163+
_clearProviderTimeout(providerName: RefreshableProviderName) {
164+
// cancel any existing poll
165+
if (this.state[providerName].timeoutId) {
166+
clearTimeout(this.state[providerName].timeoutId)
167+
this._setTimeoutId(providerName, null)
168+
}
169+
}
170+
171+
private _setTimeoutId(providerName: RefreshableProviderName, timeoutId: NodeJS.Timeout | null) {
172+
this.state[providerName].timeoutId = timeoutId
173+
}
174+
175+
private _setRefreshState(providerName: RefreshableProviderName, state: RefreshableState['state']) {
176+
this.state[providerName].state = state
177+
this._onDidChangeState.fire(providerName)
102178
}
103179
}
104180

0 commit comments

Comments (0)