1 parent f2a9177 commit 34446b1
extensions/llamacpp-extension/src/index.ts
```diff
@@ -1649,7 +1649,7 @@ export default class llamacpp_extension extends AIEngine {
     if (cfg.main_gpu !== undefined && cfg.main_gpu != 0)
       args.push('--main-gpu', String(cfg.main_gpu))
     // Note: Older llama.cpp versions are no longer supported
-    if (cfg.flash_attn !== undefined || cfg.flash_attn === '') args.push('--flash-attn', String(cfg.flash_attn)) //default: auto = ON when supported
+    if (cfg.flash_attn !== undefined || cfg.flash_attn !== '') args.push('--flash-attn', String(cfg.flash_attn)) //default: auto = ON when supported
 
     // Boolean flags
     if (cfg.ctx_shift) args.push('--context-shift')
```
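For readers tracing the changed guard, here is a minimal, self-contained sketch of how the post-commit condition evaluates. `LlamaConfig` and `buildFlashAttnArgs` are illustrative names, not the extension's actual API; only the condition itself mirrors the diff above.

```ts
// Illustrative config shape; the real extension's cfg carries many more fields.
interface LlamaConfig {
  flash_attn?: string // e.g. 'auto' | 'on' | 'off'
}

// Hypothetical helper reproducing the post-commit guard from the diff.
function buildFlashAttnArgs(cfg: LlamaConfig): string[] {
  const args: string[] = []
  // Post-commit condition: a disjunction that every value satisfies,
  // since an undefined value is necessarily not '' and a '' value is
  // necessarily not undefined.
  if (cfg.flash_attn !== undefined || cfg.flash_attn !== '') {
    args.push('--flash-attn', String(cfg.flash_attn)) // default: auto = ON when supported
  }
  return args
}

console.log(buildFlashAttnArgs({ flash_attn: 'auto' })) // ['--flash-attn', 'auto']
console.log(buildFlashAttnArgs({}))                     // ['--flash-attn', 'undefined']
```

As the second call shows, the rewritten disjunction holds for every possible value, including an unset one; if the intent was to skip unset or empty values, a conjunction (`&&`) would express that, but the diff above is reproduced verbatim from the commit.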