Commit 34446b1

fix: compare

1 parent f2a9177 · commit 34446b1

1 file changed: +1 −1

extensions/llamacpp-extension/src/index.ts

Lines changed: 1 addition & 1 deletion
@@ -1649,7 +1649,7 @@ export default class llamacpp_extension extends AIEngine {
     if (cfg.main_gpu !== undefined && cfg.main_gpu != 0)
       args.push('--main-gpu', String(cfg.main_gpu))
     // Note: Older llama.cpp versions are no longer supported
-    if (cfg.flash_attn !== undefined || cfg.flash_attn === '') args.push('--flash-attn', String(cfg.flash_attn)) //default: auto = ON when supported
+    if (cfg.flash_attn !== undefined || cfg.flash_attn !== '') args.push('--flash-attn', String(cfg.flash_attn)) //default: auto = ON when supported

     // Boolean flags
     if (cfg.ctx_shift) args.push('--context-shift')
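
For context, a minimal sketch of the config-to-flag pattern this hunk touches: optional config values are mapped onto llama.cpp server CLI arguments, skipping anything undefined or empty. The LlamaCfg shape and the pushIfSet helper below are illustrative assumptions, not the extension's actual types or API; the committed line itself uses the direct comparison shown in the diff.

    // Minimal sketch (assumed names/types, not the extension's actual API):
    // translate optional config fields into llama.cpp server CLI flags.
    interface LlamaCfg {
      main_gpu?: number
      flash_attn?: string // e.g. 'auto' | 'on' | 'off'; '' treated as "unset"
      ctx_shift?: boolean
    }

    function buildArgs(cfg: LlamaCfg): string[] {
      const args: string[] = []

      // Hypothetical helper: append "--flag <value>" only for defined, non-empty values.
      const pushIfSet = (flag: string, value: unknown): void => {
        if (value !== undefined && value !== '') args.push(flag, String(value))
      }

      if (cfg.main_gpu !== undefined && cfg.main_gpu !== 0)
        args.push('--main-gpu', String(cfg.main_gpu))

      // default: auto = ON when supported
      pushIfSet('--flash-attn', cfg.flash_attn)

      // Boolean flags take no value
      if (cfg.ctx_shift) args.push('--context-shift')

      return args
    }

    // buildArgs({ flash_attn: 'auto', ctx_shift: true })
    // -> ['--flash-attn', 'auto', '--context-shift']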

0 commit comments
