Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ Alternatively, if you want to build Void from the terminal, instead of pressing
#### Common Fixes

- Make sure you followed the prerequisite steps.
- Make sure you have the same NodeJS version as `.nvmrc`.
- Make sure you have Node version `20.16.0` (the version in `.nvmrc`)!
- If you get `"TypeError: Failed to fetch dynamically imported module"`, make sure all imports end with `.js`.
- If you see missing styles, wait a few seconds and then reload.
- If you have any questions, feel free to [submit an issue](https://github.com/voideditor/void/issues/new). You can also refer to VSCode's complete [How to Contribute](https://github.com/microsoft/vscode/wiki/How-to-Contribute) page.
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ This repo contains the full sourcecode for Void. We are currently in [open beta]

2. To get started working on Void, see [Contributing](https://github.com/voideditor/void/blob/main/CONTRIBUTING.md).

3. We're open to collaborations of all types - just reach out.
3. We're open to collaborations and suggestions of all types - just reach out.


## Reference
Expand Down
60 changes: 60 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@
},
"dependencies": {
"@anthropic-ai/sdk": "^0.32.1",
"@floating-ui/react": "^0.27.3",
"@google/generative-ai": "^0.21.0",
"@microsoft/1ds-core-js": "^3.2.13",
"@microsoft/1ds-post-js": "^3.2.13",
Expand Down
1 change: 1 addition & 0 deletions remote/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"cookie": "^0.4.0",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.2",
"debounced": "1.0.2",
"jschardet": "3.1.3",
"kerberos": "2.1.1",
"minimist": "^1.2.6",
Expand Down
25 changes: 12 additions & 13 deletions src/vs/platform/void/common/llmMessageService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
* Licensed under the Apache License, Version 2.0. See LICENSE.txt for more information.
*--------------------------------------------------------------------------------------*/

import { EventLLMMessageOnTextParams, EventLLMMessageOnErrorParams, EventLLMMessageOnFinalMessageParams, ServiceSendLLMMessageParams, MainLLMMessageParams, MainLLMMessageAbortParams, ServiceModelListParams, EventModelListOnSuccessParams, EventModelListOnErrorParams, MainModelListParams, OllamaModelResponse, OpenaiCompatibleModelResponse, } from './llmMessageTypes.js';
import { EventLLMMessageOnTextParams, EventLLMMessageOnErrorParams, EventLLMMessageOnFinalMessageParams, ServiceSendLLMMessageParams, MainSendLLMMessageParams, MainLLMMessageAbortParams, ServiceModelListParams, EventModelListOnSuccessParams, EventModelListOnErrorParams, MainModelListParams, OllamaModelResponse, OpenaiCompatibleModelResponse, } from './llmMessageTypes.js';
import { IChannel } from '../../../base/parts/ipc/common/ipc.js';
import { IMainProcessService } from '../../ipc/common/mainProcessService.js';
import { InstantiationType, registerSingleton } from '../../instantiation/common/extensions.js';
Expand Down Expand Up @@ -96,31 +96,29 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
onError({ message: 'Please add a Provider in Settings!', fullError: null })
return null
}
const { providerName, modelName } = modelSelection

// add ai instructions here because we don't have access to voidSettingsService on the other side of the proxy
const aiInstructions = this.voidSettingsService.state.globalSettings.aiInstructions
if (aiInstructions)
proxyParams.messages.unshift({ role: 'system', content: aiInstructions })
const { providerName, modelName } = modelSelection

// add state for request id
const requestId_ = generateUuid();
this.onTextHooks_llm[requestId_] = onText
this.onFinalMessageHooks_llm[requestId_] = onFinalMessage
this.onErrorHooks_llm[requestId_] = onError
const requestId = generateUuid();
this.onTextHooks_llm[requestId] = onText
this.onFinalMessageHooks_llm[requestId] = onFinalMessage
this.onErrorHooks_llm[requestId] = onError

const { aiInstructions } = this.voidSettingsService.state.globalSettings
const { settingsOfProvider } = this.voidSettingsService.state

// params will be stripped of all its functions over the IPC channel
this.channel.call('sendLLMMessage', {
...proxyParams,
requestId: requestId_,
aiInstructions,
requestId,
providerName,
modelName,
settingsOfProvider,
} satisfies MainLLMMessageParams);
} satisfies MainSendLLMMessageParams);

return requestId_
return requestId
}


Expand All @@ -147,6 +145,7 @@ export class LLMMessageService extends Disposable implements ILLMMessageService
} satisfies MainModelListParams<OllamaModelResponse>)
}


openAICompatibleList = (params: ServiceModelListParams<OpenaiCompatibleModelResponse>) => {
const { onSuccess, onError, ...proxyParams } = params

Expand Down
86 changes: 51 additions & 35 deletions src/vs/platform/void/common/llmMessageTypes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
* Licensed under the Apache License, Version 2.0. See LICENSE.txt for more information.
*--------------------------------------------------------------------------------------*/

import { IRange } from '../../../editor/common/core/range'
import { ProviderName, SettingsOfProvider } from './voidSettingsTypes.js'


Expand Down Expand Up @@ -35,67 +34,84 @@ export type _InternalLLMMessage = {
content: string;
}

// Payload for Ollama fill-in-the-middle (FIM) completion: the text before and
// after the insertion point, plus tokens that should terminate generation.
// NOTE(review): assumes Ollama's FIM endpoint contract — confirm against the caller.
type _InternalOllamaFIMMessages = {
prefix: string;
suffix: string;
stopTokens: string[];
}

export type ServiceSendLLMFeatureParams = {
useProviderFor: 'Ctrl+K';
range: IRange;
} | {
useProviderFor: 'Ctrl+L';
type SendLLMType = {
type: 'sendLLMMessage';
messages: LLMMessage[];
} | {
useProviderFor: 'Autocomplete';
range: IRange;
type: 'ollamaFIM';
messages: _InternalOllamaFIMMessages;
}

// service types
export type ServiceSendLLMMessageParams = {
onText: OnText;
onFinalMessage: OnFinalMessage;
onError: OnError;
logging: { loggingName: string, };
useProviderFor: 'Ctrl+K' | 'Ctrl+L' | 'Autocomplete';
} & SendLLMType

// params to the true sendLLMMessage function
export type LLMMMessageParams = {
export type SendLLMMessageParams = {
onText: OnText;
onFinalMessage: OnFinalMessage;
onError: OnError;
logging: { loggingName: string, };
abortRef: AbortRef;

messages: LLMMessage[];
aiInstructions: string;

logging: {
loggingName: string,
};
providerName: ProviderName;
modelName: string;
settingsOfProvider: SettingsOfProvider;
}
} & SendLLMType

export type ServiceSendLLMMessageParams = {
onText: OnText;
onFinalMessage: OnFinalMessage;
onError: OnError;

messages: LLMMessage[];

logging: {
loggingName: string,
};
} & ServiceSendLLMFeatureParams

// can't send functions across a proxy, use listeners instead
// Keys stripped before crossing the IPC boundary — functions and refs cannot be
// serialized over the channel; listeners keyed by requestId are used instead.
export type BlockedMainLLMMessageParams = 'onText' | 'onFinalMessage' | 'onError' | 'abortRef'
// Params as received by the main process: the callback-free params plus a
// requestId used to route streamed events back to the sender's registered hooks.
// NOTE(review): '& SendLLMType' re-intersects the message-variant union; verify
// Omit over the intersection preserves the discriminant as intended.
export type MainSendLLMMessageParams = Omit<SendLLMMessageParams, BlockedMainLLMMessageParams> & { requestId: string } & SendLLMType

export type MainLLMMessageParams = Omit<LLMMMessageParams, BlockedMainLLMMessageParams> & { requestId: string }
export type MainLLMMessageAbortParams = { requestId: string }

export type EventLLMMessageOnTextParams = Parameters<OnText>[0] & { requestId: string }
export type EventLLMMessageOnFinalMessageParams = Parameters<OnFinalMessage>[0] & { requestId: string }
export type EventLLMMessageOnErrorParams = Parameters<OnError>[0] & { requestId: string }

export type _InternalSendLLMMessageFnType = (params: {
messages: _InternalLLMMessage[];
onText: OnText;
onFinalMessage: OnFinalMessage;
onError: OnError;
settingsOfProvider: SettingsOfProvider;
providerName: ProviderName;
modelName: string;

_setAborter: (aborter: () => void) => void;
}) => void
// Signature of the provider-specific implementation that actually performs a
// chat-style LLM request. Results are streamed back through the onText /
// onFinalMessage / onError callbacks; the implementation registers its cancel
// function via _setAborter so the caller can abort the in-flight request.
export type _InternalSendLLMMessageFnType = (
params: {
onText: OnText;
onFinalMessage: OnFinalMessage;
onError: OnError;
providerName: ProviderName;
settingsOfProvider: SettingsOfProvider;
modelName: string;
_setAborter: (aborter: () => void) => void;

messages: _InternalLLMMessage[];
}
) => void

// Same contract as _InternalSendLLMMessageFnType, but for Ollama
// fill-in-the-middle requests: takes prefix/suffix/stopTokens instead of a
// chat-message list. Streams via the same callback trio and exposes its
// cancel function through _setAborter.
export type _InternalOllamaFIMMessageFnType = (
params: {
onText: OnText;
onFinalMessage: OnFinalMessage;
onError: OnError;
providerName: ProviderName;
settingsOfProvider: SettingsOfProvider;
modelName: string;
_setAborter: (aborter: () => void) => void;

messages: _InternalOllamaFIMMessages;
}
) => void

// service -> main -> internal -> event (back to main)
// (browser)
Expand Down
7 changes: 6 additions & 1 deletion src/vs/platform/void/common/voidSettingsService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ let _computeModelOptions = (settingsOfProvider: SettingsOfProvider) => {
const defaultState = () => {
const d: VoidSettingsState = {
settingsOfProvider: deepClone(defaultSettingsOfProvider),
modelSelectionOfFeature: { 'Ctrl+L': null, 'Ctrl+K': null, 'Autocomplete': null },
modelSelectionOfFeature: { 'Ctrl+L': null, 'Ctrl+K': null, 'Autocomplete': null, 'FastApply': null },
globalSettings: deepClone(defaultGlobalSettings),
_modelOptions: _computeModelOptions(defaultSettingsOfProvider), // computed
}
Expand Down Expand Up @@ -137,6 +137,11 @@ class VoidSettingsService extends Disposable implements IVoidSettingsService {
...defaultSettingsOfProvider.gemini.models.filter(m => /* if cant find the model in readS (yes this is O(n^2), very small) */ !readS.settingsOfProvider.gemini.models.find(m2 => m2.modelName === m.modelName))
]
}
},
modelSelectionOfFeature: {
// HACK: backfill a default 'FastApply' entry so settings persisted before this
// feature existed still load; the spread below lets any saved value win.
...{ 'FastApply': null },
...readS.modelSelectionOfFeature,
}
}

Expand Down
27 changes: 18 additions & 9 deletions src/vs/platform/void/common/voidSettingsTypes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -86,9 +86,10 @@ export const defaultDeepseekModels = modelInfoOfDefaultNames([

// https://console.groq.com/docs/models
export const defaultGroqModels = modelInfoOfDefaultNames([
"mixtral-8x7b-32768",
"llama2-70b-4096",
"gemma-7b-it"
"distil-whisper-large-v3-en",
"llama-3.3-70b-versatile",
"llama-3.1-8b-instant",
"gemma2-9b-it"
])


Expand Down Expand Up @@ -431,14 +432,22 @@ export const modelSelectionsEqual = (m1: ModelSelection, m2: ModelSelection) =>
}

// this is a state
export type ModelSelectionOfFeature = {
'Ctrl+L': ModelSelection | null,
'Ctrl+K': ModelSelection | null,
'Autocomplete': ModelSelection | null,
}
export const featureNames = ['Ctrl+L', 'Ctrl+K', 'Autocomplete', 'FastApply'] as const
export type ModelSelectionOfFeature = Record<(typeof featureNames)[number], ModelSelection | null>
export type FeatureName = keyof ModelSelectionOfFeature
export const featureNames = ['Ctrl+L', 'Ctrl+K', 'Autocomplete'] as const

// Human-readable display label for each feature that can be bound to a model.
export const displayInfoOfFeatureName = (featureName: FeatureName) => {
	switch (featureName) {
		case 'Ctrl+L':
			return 'Sidebar Chat'
		case 'Ctrl+K':
			return 'Quick Edit'
		case 'Autocomplete':
			return 'Autocomplete'
		case 'FastApply':
			return 'Fast Apply'
		default:
			throw new Error(`Feature Name ${featureName} not allowed`)
	}
}



Expand Down
Loading