File tree (Expand file tree / Collapse file tree) — 4 files changed: +10 −7 lines changed
extensions/llamacpp-extension (Expand file tree / Collapse file tree) — 4 files changed: +10 −7 lines changed
Original file line number · Diff line number · Diff line change @@ -13,5 +13,8 @@ export default defineConfig({
1313 define : {
1414 SETTINGS : JSON . stringify ( settingJson ) ,
1515 ENGINE : JSON . stringify ( pkgJson . engine ) ,
16+ IS_WINDOWS : JSON . stringify ( process . platform === 'win32' ) ,
17+ IS_MAC : JSON . stringify ( process . platform === 'darwin' ) ,
18+ IS_LINUX : JSON . stringify ( process . platform === 'linux' ) ,
1619 } ,
1720} )
Original file line number Diff line number Diff line change @@ -97,9 +97,7 @@ export async function getBackendExePath(
9797 backend : string ,
9898 version : string
9999) : Promise < string > {
100- const sysInfo = await window . core . api . getSystemInfo ( )
101- const exe_name =
102- sysInfo . os_type === 'windows' ? 'llama-server.exe' : 'llama-server'
100+ const exe_name = IS_WINDOWS ? 'llama-server.exe' : 'llama-server'
103101 const backendDir = await getBackendDir ( backend , version )
104102 let exePath : string
105103 const buildDir = await joinPath ( [ backendDir , 'build' ] )
@@ -136,8 +134,7 @@ export async function downloadBackend(
136134 // Get proxy configuration from localStorage
137135 const proxyConfig = getProxyConfig ( )
138136
139- const sysInfo = await window . core . api . getSystemInfo ( )
140- const platformName = sysInfo . os_type === 'windows' ? 'win' : 'linux'
137+ const platformName = IS_WINDOWS ? 'win' : 'linux'
141138
142139 const downloadItems = [
143140 {
Original file line number Diff line number Diff line change 11declare const SETTINGS : SettingComponentProps [ ]
22declare const ENGINE : string
3+ declare const IS_WINDOWS : boolean
4+ declare const IS_MAC : boolean
5+ declare const IS_LINUX : boolean
Original file line number Diff line number Diff line change @@ -364,7 +364,7 @@ export default class llamacpp_extension extends AIEngine {
364364 await this . ensureBackendReady ( bestBackend , bestVersion )
365365
366366 // Add a small delay on Windows to ensure file operations complete
367- if ( process . platform === 'win32' ) {
367+ if ( IS_WINDOWS ) {
368368 await new Promise ( ( resolve ) => setTimeout ( resolve , 1000 ) )
369369 }
370370
@@ -387,7 +387,7 @@ export default class llamacpp_extension extends AIEngine {
387387 )
388388
389389 // Clean up old backends (with additional delay on Windows)
390- if ( process . platform === 'win32' ) {
390+ if ( IS_WINDOWS ) {
391391 await new Promise ( ( resolve ) => setTimeout ( resolve , 500 ) )
392392 }
393393 await this . removeOldBackends ( bestVersion , bestBackend )
You can't perform that action at this time.
0 commit comments