Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions extensions/void/README.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
This folder is no longer relevant, since Void is no longer just an extension.

See the new Void code in `src/vs/workbench/contrib/void` (and a few other locations — you can find them all by searching "Void").
54 changes: 29 additions & 25 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@
"@xterm/addon-webgl": "^0.19.0-beta.64",
"@xterm/headless": "^5.6.0-beta.64",
"@xterm/xterm": "^5.6.0-beta.64",
"groq-sdk": "^0.9.0",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.2",
"jschardet": "3.1.3",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,10 @@ import OpenAI from 'openai';
import { Ollama } from 'ollama/browser'
import { Content, GoogleGenerativeAI, GoogleGenerativeAIFetchError } from '@google/generative-ai';
import { posthog } from 'posthog-js'
import Groq, { GroqError } from 'groq-sdk'

import type { VoidConfig } from '../../../registerConfig.js';
import type { LLMMessage, OnText, OnError, OnFinalMessage, SendLLMMMessageParams, } from '../../../../../../../platform/void/common/llmMessageTypes.js';
import { LLMMessageServiceParams } from '../../../../../../../platform/void/common/llmMessageTypes.js';

type SendLLMMessageFnTypeInternal = (params: {
messages: LLMMessage[];
Expand Down Expand Up @@ -211,6 +212,44 @@ export const sendOllamaMsg: SendLLMMessageFnTypeInternal = ({ messages, onText,

};

// Groq
// Streams a chat completion from the Groq cloud API, forwarding incremental
// text to `onText` and the accumulated result to `onFinalMessage`.
// Errors (network, auth, API) are routed to `onError` rather than thrown.
const sendGroqMsg: SendLLMMessageFnTypeInternal = async ({ messages, onText, onFinalMessage, onError, voidConfig, _setAborter }) => {
	let fullText = '';

	// NOTE(review): `dangerouslyAllowBrowser` is required because this runs in
	// a browser-like (webview) context; the key comes from user config, not a bundle.
	const groq = new Groq({
		apiKey: voidConfig.groq.apikey,
		dangerouslyAllowBrowser: true
	});

	try {
		const response = await groq.chat.completions.create({
			messages: messages,
			model: voidConfig.groq.model,
			stream: true,
			temperature: 0.7,
			max_tokens: parseMaxTokensStr(voidConfig.default.maxTokens),
		});

		// Let the caller cancel the in-flight stream.
		_setAborter(() => response.controller.abort());

		// Forward each streamed delta as it arrives.
		for await (const chunk of response) {
			const newText = chunk.choices[0]?.delta?.content || '';
			if (newText) {
				fullText += newText;
				onText({ newText, fullText });
			}
		}

		onFinalMessage({ fullText });
	} catch (error) {
		onError({ error });
	}
};


// Greptile
// https://docs.greptile.com/api-reference/query
// https://docs.greptile.com/quickstart#sample-response-streamed
Expand Down
13 changes: 13 additions & 0 deletions src/vs/workbench/contrib/void/browser/registerConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ export const nonDefaultConfigFields = [
'openAI',
'gemini',
'greptile',
'groq',
'ollama',
'openRouter',
'openAICompatible',
Expand Down Expand Up @@ -122,6 +123,18 @@ const voidConfigInfo: Record<
repository: configString('Repository identifier in "owner / repository" format.', ''),
branch: configString('Name of the branch to use.', 'main'),
},
groq: {
apikey: configString('Groq API key.', ''),
model: configEnum(
'Groq model to use.',
'mixtral-8x7b-32768',
[
"mixtral-8x7b-32768",
"llama2-70b-4096",
"gemma-7b-it"
] as const
),
},
ollama: {
endpoint: configString(
'The endpoint of your Ollama instance. Start Ollama by running `OLLAMA_ORIGINS="vscode - webview://*" ollama serve`.',
Expand Down