
Commit 144aef5

fix(tests): pin to branch for config used in tests
Signed-off-by: Ettore Di Giacinto <[email protected]>
1 parent 244f4b5 commit 144aef5

File tree

3 files changed: +6 / -6 lines changed


core/config/backend_config_test.go

Lines changed: 2 additions & 2 deletions
@@ -48,9 +48,9 @@ parameters:
 Expect(config.Name).To(Equal("bar-baz"))
 Expect(config.Validate()).To(BeTrue())
 
-// download https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml
+// download https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml
 httpClient := http.Client{}
-resp, err := httpClient.Get("https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml")
+resp, err := httpClient.Get("https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml")
 Expect(err).To(BeNil())
 defer resp.Body.Close()
 tmp, err = os.CreateTemp("", "config.yaml")
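
For context, the surrounding test downloads the embedded config from the (now pinned) URL and writes it to a temporary file before loading it. A minimal standalone sketch of that flow, outside the Ginkgo/Gomega suite and with plain error handling instead of Expect assertions, could look like this:

```go
package main

import (
	"io"
	"net/http"
	"os"
)

func main() {
	// Pinned config URL from the test above; the v2.25.0 tag keeps the
	// fixture stable even if the file changes or moves on master.
	url := "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml"

	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Write the fetched YAML to a temporary file, mirroring the
	// os.CreateTemp call in the test.
	tmp, err := os.CreateTemp("", "config.yaml")
	if err != nil {
		panic(err)
	}
	defer os.Remove(tmp.Name())

	if _, err := io.Copy(tmp, resp.Body); err != nil {
		panic(err)
	}
}
```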

core/http/app_test.go

Lines changed: 2 additions & 2 deletions
@@ -476,7 +476,7 @@ var _ = Describe("API test", func() {
 })
 It("apply models from config", func() {
 response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
-ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml",
+ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml",
 })
 
 Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
@@ -600,7 +600,7 @@ var _ = Describe("API test", func() {
 
 modelName := "hermes-2-pro-mistral"
 response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
-ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml",
+ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml",
 })
 
 Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
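
The postModelApplyRequest helper itself is not shown in this diff. Based on the endpoint and fields used here, and the "config_url" field shown in the docs change below, a hedged standalone sketch of the equivalent request could be:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// JSON field name taken from the docs example below; the test's
	// modelApplyRequest struct presumably marshals ConfigURL to this key.
	body, err := json.Marshal(map[string]string{
		"config_url": "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml",
	})
	if err != nil {
		panic(err)
	}

	// Endpoint used by the test suite above.
	resp, err := http.Post("http://127.0.0.1:9090/models/apply", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var result map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		panic(err)
	}

	// The test expects a non-empty "uuid" in the response, identifying the
	// apply job that was started.
	fmt.Println("uuid:", result["uuid"])
}
```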

docs/content/docs/features/model-gallery.md

Lines changed: 2 additions & 2 deletions
@@ -134,12 +134,12 @@ curl $LOCALAI/models/apply -H "Content-Type: application/json" -d '{
 }'
 ```
 
-An example that installs openllama can be:
+An example that installs hermes-2-pro-mistral can be:
 
 ```bash
 LOCALAI=http://localhost:8080
 curl $LOCALAI/models/apply -H "Content-Type: application/json" -d '{
-  "config_url": "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml"
+  "config_url": "https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml"
 }'
 ```
 
