Skip to content

Commit 64a6080

Browse files
authored
fix: check for env value before setting (#6266)
* fix: check for env value before setting
* Use empty instead of none
1 parent ef90f07 commit 64a6080

File tree

2 files changed

+10
-9
lines changed

2 files changed

+10
-9
lines changed

extensions/llamacpp-extension/settings.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
"description": "Environmental variables for llama.cpp(KEY=VALUE), separated by ';'",
1717
"controllerType": "input",
1818
"controllerProps": {
19-
"value": "none",
19+
"value": "",
2020
"placeholder": "Eg. GGML_VK_VISIBLE_DEVICES=0,1",
2121
"type": "text",
2222
"textAlign": "right"

extensions/llamacpp-extension/src/index.ts

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1082,17 +1082,18 @@ export default class llamacpp_extension extends AIEngine {
10821082

10831083
// If we reach here, download completed successfully (including validation)
10841084
// The downloadFiles function only returns successfully if all files downloaded AND validated
1085-
events.emit(DownloadEvent.onFileDownloadAndVerificationSuccess, {
1086-
modelId,
1087-
downloadType: 'Model'
1085+
events.emit(DownloadEvent.onFileDownloadAndVerificationSuccess, {
1086+
modelId,
1087+
downloadType: 'Model',
10881088
})
10891089
} catch (error) {
10901090
logger.error('Error downloading model:', modelId, opts, error)
10911091
const errorMessage =
10921092
error instanceof Error ? error.message : String(error)
10931093

10941094
// Check if this is a cancellation
1095-
const isCancellationError = errorMessage.includes('Download cancelled') ||
1095+
const isCancellationError =
1096+
errorMessage.includes('Download cancelled') ||
10961097
errorMessage.includes('Validation cancelled') ||
10971098
errorMessage.includes('Hash computation cancelled') ||
10981099
errorMessage.includes('cancelled') ||
@@ -1372,7 +1373,7 @@ export default class llamacpp_extension extends AIEngine {
13721373
envs['LLAMA_API_KEY'] = api_key
13731374

13741375
// set user envs
1375-
this.parseEnvFromString(envs, this.llamacpp_env)
1376+
if (this.llamacpp_env) this.parseEnvFromString(envs, this.llamacpp_env)
13761377

13771378
// model option is required
13781379
// NOTE: model_path and mmproj_path can be either relative to Jan's data folder or absolute path
@@ -1751,7 +1752,7 @@ export default class llamacpp_extension extends AIEngine {
17511752
}
17521753
// set envs
17531754
const envs: Record<string, string> = {}
1754-
this.parseEnvFromString(envs, this.llamacpp_env)
1755+
if (this.llamacpp_env) this.parseEnvFromString(envs, this.llamacpp_env)
17551756

17561757
// Ensure backend is downloaded and ready before proceeding
17571758
await this.ensureBackendReady(backend, version)
@@ -1767,7 +1768,7 @@ export default class llamacpp_extension extends AIEngine {
17671768
return dList
17681769
} catch (error) {
17691770
logger.error('Failed to query devices:\n', error)
1770-
throw new Error("Failed to load llamacpp backend")
1771+
throw new Error('Failed to load llamacpp backend')
17711772
}
17721773
}
17731774

@@ -1876,7 +1877,7 @@ export default class llamacpp_extension extends AIEngine {
18761877
logger.info(
18771878
`Using explicit key_length: ${keyLen}, value_length: ${valLen}`
18781879
)
1879-
headDim = (keyLen + valLen)
1880+
headDim = keyLen + valLen
18801881
} else {
18811882
// Fall back to embedding_length estimation
18821883
const embeddingLen = Number(meta[`${arch}.embedding_length`])

0 commit comments

Comments (0)