3 files changed, +6 −6 lines changed (changed paths include docs/content/docs/features)

@@ -48,9 +48,9 @@ parameters:
 	Expect(config.Name).To(Equal("bar-baz"))
 	Expect(config.Validate()).To(BeTrue())

-	// download https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml
+	// download https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml
 	httpClient := http.Client{}
-	resp, err := httpClient.Get("https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml")
+	resp, err := httpClient.Get("https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml")
 	Expect(err).To(BeNil())
 	defer resp.Body.Close()
 	tmp, err = os.CreateTemp("", "config.yaml")

@@ -476,7 +476,7 @@ var _ = Describe("API test", func() {
 	})
 	It("apply models from config", func() {
 		response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
-			ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml",
+			ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml",
 		})

 		Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))

@@ -600,7 +600,7 @@ var _ = Describe("API test", func() {

 		modelName := "hermes-2-pro-mistral"
 		response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
-			ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml",
+			ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml",
 		})

 		Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
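
Aside for readers who don't have the test suite open: both specs above boil down to POSTing a JSON body containing a `config_url` to `/models/apply` and asserting that the response carries a non-empty `uuid`. The sketch below is a minimal standalone version of that pattern; the function name `applyModelFromConfig` and its error handling are illustrative assumptions, not the suite's actual `postModelApplyRequest`/`modelApplyRequest` helpers.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// applyModelFromConfig POSTs {"config_url": ...} to /models/apply and decodes
// the JSON response into a map, so callers can inspect fields such as "uuid".
// This is a sketch of the pattern used in the tests, not the test helper itself.
func applyModelFromConfig(baseURL, configURL string) (map[string]interface{}, error) {
	body, err := json.Marshal(map[string]string{"config_url": configURL})
	if err != nil {
		return nil, err
	}
	resp, err := http.Post(baseURL+"/models/apply", "application/json", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	result := map[string]interface{}{}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return nil, err
	}
	return result, nil
}

func main() {
	response, err := applyModelFromConfig(
		"http://127.0.0.1:9090",
		"https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(response["uuid"]) // the tests assert this is non-empty
}
```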

@@ -134,12 +134,12 @@ curl $LOCALAI/models/apply -H "Content-Type: application/json" -d '{
 }'
 ```

-An example that installs openllama can be:
+An example that installs hermes-2-pro-mistral can be:

 ```bash
 LOCALAI=http://localhost:8080
 curl $LOCALAI/models/apply -H "Content-Type: application/json" -d '{
-    "config_url": "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml"
+    "config_url": "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml"
 }'
 ```
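
The apply call queues a background installation job and returns a `uuid`. Assuming the `/models/jobs/<uuid>` status endpoint and the boolean `processed` field described in LocalAI's gallery documentation (both are assumptions here, not part of this diff), a client could wait for the install to finish roughly as sketched below.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// waitForJob polls the job-status endpoint until the install job reports
// completion. The endpoint path and the "processed"/"message" fields are
// assumptions based on LocalAI's gallery docs, not guaranteed by this diff.
func waitForJob(baseURL, uuid string) error {
	for {
		resp, err := http.Get(baseURL + "/models/jobs/" + uuid)
		if err != nil {
			return err
		}
		var status struct {
			Processed bool   `json:"processed"`
			Message   string `json:"message"`
		}
		err = json.NewDecoder(resp.Body).Decode(&status)
		resp.Body.Close()
		if err != nil {
			return err
		}
		if status.Processed {
			fmt.Println("job done:", status.Message)
			return nil
		}
		time.Sleep(2 * time.Second)
	}
}

func main() {
	// Placeholder uuid: use the value returned by POST /models/apply above.
	if err := waitForJob("http://localhost:8080", "123e4567-e89b-12d3-a456-426614174000"); err != nil {
		panic(err)
	}
}
```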