Merged
@@ -22,7 +22,7 @@ export class ExpandLineSelectionAction extends EditorAction {
 			kbOpts: {
 				weight: KeybindingWeight.EditorCore,
 				kbExpr: EditorContextKeys.textInputFocus,
-				primary: KeyMod.CtrlCmd | KeyCode.KeyL
+				primary: KeyMod.CtrlCmd | KeyCode.KeyM // Void changed this to Cmd+M
 			},
 		});
 	}
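As context for the hunk above: `primary` is a single number built by OR-ing bit-flag constants from VS Code's `KeyMod`/`KeyCode` (declared in `vs/base/common/keyCodes`; import path assumed here). A minimal sketch of what the changed line encodes:

```typescript
import { KeyCode, KeyMod } from 'vs/base/common/keyCodes';

// KeyMod.CtrlCmd is one modifier flag that resolves to Cmd on macOS and Ctrl
// on Windows/Linux, so this single constant means Cmd+M / Ctrl+M everywhere.
const primary: number = KeyMod.CtrlCmd | KeyCode.KeyM;
```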
44 changes: 36 additions & 8 deletions src/vs/platform/void/common/llmMessageService.ts
@@ -3,7 +3,7 @@
  * Void Editor additions licensed under the AGPL 3.0 License.
  *--------------------------------------------------------------------------------------------*/

-import { EventLLMMessageOnTextParams, EventLLMMessageOnErrorParams, EventLLMMessageOnFinalMessageParams, ServiceSendLLMMessageParams, MainLLMMessageParams, MainLLMMessageAbortParams, ServiceOllamaListParams, EventOllamaListOnSuccessParams, EventOllamaListOnErrorParams, MainOllamaListParams } from './llmMessageTypes.js';
+import { EventLLMMessageOnTextParams, EventLLMMessageOnErrorParams, EventLLMMessageOnFinalMessageParams, ServiceSendLLMMessageParams, MainLLMMessageParams, MainLLMMessageAbortParams, ServiceModelListParams, EventModelListOnSuccessParams, EventModelListOnErrorParams, MainModelListParams, OllamaModelResponse, OpenaiCompatibleModelResponse, } from './llmMessageTypes.js';
 import { IChannel } from '../../../base/parts/ipc/common/ipc.js';
 import { IMainProcessService } from '../../ipc/common/mainProcessService.js';
 import { InstantiationType, registerSingleton } from '../../instantiation/common/extensions.js';
@@ -21,7 +21,8 @@ export interface ILLMMessageService {
 	readonly _serviceBrand: undefined;
 	sendLLMMessage: (params: ServiceSendLLMMessageParams) => string | null;
 	abort: (requestId: string) => void;
-	ollamaList: (params: ServiceOllamaListParams) => void;
+	ollamaList: (params: ServiceModelListParams<OllamaModelResponse>) => void;
+	openAICompatibleList: (params: ServiceModelListParams<OpenaiCompatibleModelResponse>) => void;
 }

 export class LLMMessageService extends Disposable implements ILLMMessageService {
@@ -36,9 +37,12 @@ export class LLMMessageService extends Disposable implements ILLMMessageService


 	// ollamaList
-	private readonly onSuccess_ollama: { [eventId: string]: ((params: EventOllamaListOnSuccessParams) => void) } = {}
-	private readonly onError_ollama: { [eventId: string]: ((params: EventOllamaListOnErrorParams) => void) } = {}
+	private readonly onSuccess_ollama: { [eventId: string]: ((params: EventModelListOnSuccessParams<OllamaModelResponse>) => void) } = {}
+	private readonly onError_ollama: { [eventId: string]: ((params: EventModelListOnErrorParams<OllamaModelResponse>) => void) } = {}

+	// openAICompatibleList
+	private readonly onSuccess_openAICompatible: { [eventId: string]: ((params: EventModelListOnSuccessParams<OpenaiCompatibleModelResponse>) => void) } = {}
+	private readonly onError_openAICompatible: { [eventId: string]: ((params: EventModelListOnErrorParams<OpenaiCompatibleModelResponse>) => void) } = {}

 	constructor(
 		@IMainProcessService private readonly mainProcessService: IMainProcessService, // used as a renderer (only usable on client side)
@@ -65,12 +69,19 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
 			this._onRequestIdDone(e.requestId)
 		}))
 		// ollama
-		this._register((this.channel.listen('onSuccess_ollama') satisfies Event<EventOllamaListOnSuccessParams>)(e => {
+		this._register((this.channel.listen('onSuccess_ollama') satisfies Event<EventModelListOnSuccessParams<OllamaModelResponse>>)(e => {
 			this.onSuccess_ollama[e.requestId]?.(e)
 		}))
-		this._register((this.channel.listen('onError_ollama') satisfies Event<EventOllamaListOnErrorParams>)(e => {
+		this._register((this.channel.listen('onError_ollama') satisfies Event<EventModelListOnErrorParams<OllamaModelResponse>>)(e => {
 			this.onError_ollama[e.requestId]?.(e)
 		}))
+		// openaiCompatible
+		this._register((this.channel.listen('onSuccess_openAICompatible') satisfies Event<EventModelListOnSuccessParams<OpenaiCompatibleModelResponse>>)(e => {
+			this.onSuccess_openAICompatible[e.requestId]?.(e)
+		}))
+		this._register((this.channel.listen('onError_openAICompatible') satisfies Event<EventModelListOnErrorParams<OpenaiCompatibleModelResponse>>)(e => {
+			this.onError_openAICompatible[e.requestId]?.(e)
+		}))

 	}

@@ -113,7 +124,7 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
 	}


-	ollamaList = (params: ServiceOllamaListParams) => {
+	ollamaList = (params: ServiceModelListParams<OllamaModelResponse>) => {
 		const { onSuccess, onError, ...proxyParams } = params

 		const { settingsOfProvider } = this.voidSettingsService.state
@@ -127,7 +138,24 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
 			...proxyParams,
 			settingsOfProvider,
 			requestId: requestId_,
-		} satisfies MainOllamaListParams)
+		} satisfies MainModelListParams<OllamaModelResponse>)
 	}

+	openAICompatibleList = (params: ServiceModelListParams<OpenaiCompatibleModelResponse>) => {
+		const { onSuccess, onError, ...proxyParams } = params
+
+		const { settingsOfProvider } = this.voidSettingsService.state
+
+		// add state for request id
+		const requestId_ = generateUuid();
+		this.onSuccess_openAICompatible[requestId_] = onSuccess
+		this.onError_openAICompatible[requestId_] = onError
+
+		this.channel.call('openAICompatibleList', {
+			...proxyParams,
+			settingsOfProvider,
+			requestId: requestId_,
+		} satisfies MainModelListParams<OpenaiCompatibleModelResponse>)
+	}

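For a sense of how the new method is consumed, a hypothetical renderer-side caller might look like this (not part of the diff; it assumes an `ILLMMessageService` instance named `llmMessageService` obtained via dependency injection):

```typescript
// List models from an OpenAI-compatible endpoint. The service generates a
// requestId, stores the callbacks under it, and proxies the rest over IPC;
// the main process answers via the onSuccess_openAICompatible /
// onError_openAICompatible channel events registered above.
llmMessageService.openAICompatibleList({
	onSuccess: ({ models }) => {
		console.log('models:', models.map(m => m.id)) // OpenaiCompatibleModelResponse[]
	},
	onError: ({ error }) => {
		console.error('listing models failed:', error)
	},
})
```

Note the callbacks themselves never cross the IPC boundary: only `proxyParams`, `settingsOfProvider`, and the generated `requestId` are sent, and responses are routed back by `requestId`.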
33 changes: 21 additions & 12 deletions src/vs/platform/void/common/llmMessageTypes.ts
@@ -97,7 +97,7 @@ export type _InternalSendLLMMessageFnType = (params: {


 // These are from 'ollama' SDK
-interface ModelDetails {
+interface OllamaModelDetails {
 	parent_model: string;
 	format: string;
 	family: string;
@@ -106,35 +106,44 @@ interface ModelDetails {
 	quantization_level: string;
 }

-export type ModelResponse = {
+export type OllamaModelResponse = {
 	name: string;
 	modified_at: Date;
 	size: number;
 	digest: string;
-	details: ModelDetails;
+	details: OllamaModelDetails;
 	expires_at: Date;
 	size_vram: number;
 }

+export type OpenaiCompatibleModelResponse = {
+	id: string;
+	created: number;
+	object: 'model';
+	owned_by: string;
+}
+

 // params to the true list fn
-export type OllamaListParams = {
+export type ModelListParams<modelResponse> = {
 	settingsOfProvider: SettingsOfProvider;
-	onSuccess: (param: { models: ModelResponse[] }) => void;
+	onSuccess: (param: { models: modelResponse[] }) => void;
 	onError: (param: { error: string }) => void;
 }

-export type ServiceOllamaListParams = {
-	onSuccess: (param: { models: ModelResponse[] }) => void;
+// params to the service
+export type ServiceModelListParams<modelResponse> = {
+	onSuccess: (param: { models: modelResponse[] }) => void;
 	onError: (param: { error: any }) => void;
 }

-type BlockedMainOllamaListParams = 'onSuccess' | 'onError'
-export type MainOllamaListParams = Omit<OllamaListParams, BlockedMainOllamaListParams> & { requestId: string }
+type BlockedMainModelListParams = 'onSuccess' | 'onError'
+export type MainModelListParams<modelResponse> = Omit<ModelListParams<modelResponse>, BlockedMainModelListParams> & { requestId: string }

+export type EventModelListOnSuccessParams<modelResponse> = Parameters<ModelListParams<modelResponse>['onSuccess']>[0] & { requestId: string }
+export type EventModelListOnErrorParams<modelResponse> = Parameters<ModelListParams<modelResponse>['onError']>[0] & { requestId: string }

-export type EventOllamaListOnSuccessParams = Parameters<OllamaListParams['onSuccess']>[0] & { requestId: string }
-export type EventOllamaListOnErrorParams = Parameters<OllamaListParams['onError']>[0] & { requestId: string }



-export type _InternalOllamaListFnType = (params: OllamaListParams) => void
+export type _InternalModelListFnType<modelResponse> = (params: ModelListParams<modelResponse>) => void
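To see what the generics buy, the retired Ollama-specific types are recoverable as instantiations of the new generic ones (illustrative aliases only; the PR does not declare these):

```typescript
// Each old concrete type is now ModelListParams<T> or a derivative, with T
// fixed to the provider's response shape.
type LegacyOllamaListParams = ModelListParams<OllamaModelResponse>;
type LegacyMainOllamaListParams = MainModelListParams<OllamaModelResponse>; // callbacks omitted, requestId added
type LegacyEventOllamaListOnSuccessParams = EventModelListOnSuccessParams<OllamaModelResponse>; // { models } & { requestId }

// The OpenAI-compatible provider reuses the same machinery with a different T:
type OpenaiCompatibleListParams = ModelListParams<OpenaiCompatibleModelResponse>;
```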
168 changes: 122 additions & 46 deletions src/vs/platform/void/common/refreshModelService.ts
@@ -8,10 +8,37 @@ import { InstantiationType, registerSingleton } from '../../instantiation/common
 import { IVoidSettingsService } from './voidSettingsService.js';
 import { ILLMMessageService } from './llmMessageService.js';
 import { Emitter, Event } from '../../../base/common/event.js';
-import { Disposable } from '../../../base/common/lifecycle.js';
+import { Disposable, IDisposable } from '../../../base/common/lifecycle.js';
 import { ProviderName, SettingsOfProvider } from './voidSettingsTypes.js';
+import { OllamaModelResponse, OpenaiCompatibleModelResponse } from './llmMessageTypes.js';

-export type RefreshModelState = 'done' | 'loading'
+
+export const refreshableProviderNames = ['ollama', 'openAICompatible'] satisfies ProviderName[]
+
+export type RefreshableProviderName = typeof refreshableProviderNames[number]
+
+
+type RefreshableState = {
+	state: 'init',
+	timeoutId: null,
+} | {
+	state: 'refreshing',
+	timeoutId: NodeJS.Timeout | null,
+} | {
+	state: 'success',
+	timeoutId: null,
+}
+
+
+export type RefreshModelStateOfProvider = Record<RefreshableProviderName, RefreshableState>
+
+
+
+const refreshBasedOn: { [k in RefreshableProviderName]: (keyof SettingsOfProvider[k])[] } = {
+	ollama: ['enabled', 'endpoint'],
+	openAICompatible: ['enabled', 'endpoint', 'apiKey'],
+}
+const REFRESH_INTERVAL = 5000

 // element-wise equals
 function eq<T>(a: T[], b: T[]): boolean {
@@ -23,9 +50,9 @@ function eq<T>(a: T[], b: T[]): boolean {
 }
 export interface IRefreshModelService {
 	readonly _serviceBrand: undefined;
-	refreshOllamaModels(): void;
-	onDidChangeState: Event<void>;
-	state: RefreshModelState;
+	refreshModels: (providerName: RefreshableProviderName) => Promise<void>;
+	onDidChangeState: Event<RefreshableProviderName>;
+	state: RefreshModelStateOfProvider;
 }

 export const IRefreshModelService = createDecorator<IRefreshModelService>('RefreshModelService');
@@ -34,71 +61,120 @@ export class RefreshModelService extends Disposable implements IRefreshModelServ

 	readonly _serviceBrand: undefined;

-	private readonly _onDidChangeState = new Emitter<void>();
-	readonly onDidChangeState: Event<void> = this._onDidChangeState.event; // this is primarily for use in react, so react can listen + update on state changes
+	private readonly _onDidChangeState = new Emitter<RefreshableProviderName>();
+	readonly onDidChangeState: Event<RefreshableProviderName> = this._onDidChangeState.event; // this is primarily for use in react, so react can listen + update on state changes

 	constructor(
 		@IVoidSettingsService private readonly voidSettingsService: IVoidSettingsService,
 		@ILLMMessageService private readonly llmMessageService: ILLMMessageService,
 	) {
 		super()

-		// on mount, refresh ollama models
-		this.refreshOllamaModels()
-
-		// every time ollama.enabled changes, refresh ollama models, like useEffect
-		let relevantVals = () => [this.voidSettingsService.state.settingsOfProvider.ollama.enabled, this.voidSettingsService.state.settingsOfProvider.ollama.endpoint]
-		let prevVals = relevantVals()
-		this._register(
-			this.voidSettingsService.onDidChangeState(() => { // we might want to debounce this
-				const newVals = relevantVals()
-				if (!eq(prevVals, newVals)) {
-					this.refreshOllamaModels()
-					prevVals = newVals
-				}
-			})
-		)
-
-	}
+		const disposables: Set<IDisposable> = new Set()
+
+
+		const startRefreshing = () => {
+			this._clearAllTimeouts()
+			disposables.forEach(d => d.dispose())
+			disposables.clear()
+
+			if (!voidSettingsService.state.featureFlagSettings.autoRefreshModels) return
+
+			for (const providerName of refreshableProviderNames) {
+
+				const refresh = () => {
+					// const { enabled } = this.voidSettingsService.state.settingsOfProvider[providerName]
+					this.refreshModels(providerName, { enableProviderOnSuccess: true }) // enable the provider on success
+				}
+
+				refresh()
+
+				// every time providerName.enabled changes, refresh models too, like a useEffect
+				let relevantVals = () => refreshBasedOn[providerName].map(settingName => this.voidSettingsService.state.settingsOfProvider[providerName][settingName])
+				let prevVals = relevantVals() // each iteration of a for loop has its own context and vars, so this is ok
+				disposables.add(
+					this.voidSettingsService.onDidChangeState(() => { // we might want to debounce this
+						const newVals = relevantVals()
+						if (!eq(prevVals, newVals)) {
+							refresh()
+							prevVals = newVals
+						}
+					})
+				)
+			}
+		}
+
+		// on mount (when get init settings state), and if a relevant feature flag changes (detected natively right now by refreshing if any flag changes), start refreshing models
+		voidSettingsService.waitForInitState.then(() => {
+			startRefreshing()
+			this._register(
+				voidSettingsService.onDidChangeState((type) => { if (type === 'featureFlagSettings') startRefreshing() })
+			)
+		})
+
+	}

-	state: RefreshModelState = 'done'
-
-	private _timeoutId: NodeJS.Timeout | null = null
-	private _cancelTimeout = () => {
-		if (this._timeoutId) {
-			clearTimeout(this._timeoutId)
-			this._timeoutId = null
-		}
-	}
-
-	async refreshOllamaModels() {
-		// cancel any existing poll
-		this._cancelTimeout()
-
-		// if ollama is disabled, obivously done
-		if (!this.voidSettingsService.state.settingsOfProvider.ollama.enabled) {
-			this._setState('done')
-			return
-		}
+	state: RefreshModelStateOfProvider = {
+		ollama: { state: 'init', timeoutId: null },
+		openAICompatible: { state: 'init', timeoutId: null },
+	}
+
+
+	// start listening for models (and don't stop until success)
+	async refreshModels(providerName: RefreshableProviderName, options?: { enableProviderOnSuccess?: boolean }) {
+		this._clearProviderTimeout(providerName)

 		// start loading models
-		this._setState('loading')
+		this._setRefreshState(providerName, 'refreshing')
+
+		const fn = providerName === 'ollama' ? this.llmMessageService.ollamaList
+			: providerName === 'openAICompatible' ? this.llmMessageService.openAICompatibleList
+				: () => { }

-		this.llmMessageService.ollamaList({
+		fn({
 			onSuccess: ({ models }) => {
-				this.voidSettingsService.setDefaultModels('ollama', models.map(model => model.name))
-				this._setState('done')
+				this.voidSettingsService.setDefaultModels(providerName, models.map(model => {
+					if (providerName === 'ollama') return (model as OllamaModelResponse).name
+					else if (providerName === 'openAICompatible') return (model as OpenaiCompatibleModelResponse).id
+					else throw new Error('refreshMode fn: unknown provider', providerName)
+				}))
+
+				if (options?.enableProviderOnSuccess)
+					this.voidSettingsService.setSettingOfProvider(providerName, 'enabled', true)
+
+				this._setRefreshState(providerName, 'success')
 			},
 			onError: ({ error }) => {
 				// poll
-				console.log('retrying ollamaList:', error)
-				this._timeoutId = setTimeout(() => this.refreshOllamaModels(), 5000)
+				console.log('retrying list models:', providerName, error)
+				const timeoutId = setTimeout(() => this.refreshModels(providerName, options), REFRESH_INTERVAL)
+				this._setTimeoutId(providerName, timeoutId)
 			}
 		})
 	}

-	private _setState(state: RefreshModelState) {
-		this.state = state
-		this._onDidChangeState.fire()
+	_clearAllTimeouts() {
+		for (const providerName of refreshableProviderNames) {
+			this._clearProviderTimeout(providerName)
+		}
+	}
+
+	_clearProviderTimeout(providerName: RefreshableProviderName) {
+		// cancel any existing poll
+		if (this.state[providerName].timeoutId) {
+			clearTimeout(this.state[providerName].timeoutId)
+			this._setTimeoutId(providerName, null)
+		}
+	}
+
+	private _setTimeoutId(providerName: RefreshableProviderName, timeoutId: NodeJS.Timeout | null) {
+		this.state[providerName].timeoutId = timeoutId
+	}
+
+	private _setRefreshState(providerName: RefreshableProviderName, state: RefreshableState['state']) {
+		this.state[providerName].state = state
+		this._onDidChangeState.fire(providerName)
 	}
 }
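Because `onDidChangeState` now fires with the provider name, UI code can subscribe once and react per provider; on failure the service keeps retrying every `REFRESH_INTERVAL` (5s) until a list call succeeds. A hypothetical React-side consumer (not in this diff; assumes the service instance is passed in):

```typescript
import { useEffect, useState } from 'react';

function useRefreshModelState(refreshModelService: IRefreshModelService): RefreshModelStateOfProvider {
	const [state, setState] = useState(refreshModelService.state)
	useEffect(() => {
		const disposable = refreshModelService.onDidChangeState((providerName) => {
			// providerName identifies the entry that changed; we re-read the whole
			// state object here, but a finer-grained update could use it directly
			setState({ ...refreshModelService.state })
		})
		return () => disposable.dispose()
	}, [refreshModelService])
	return state
}
```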
