Skip to content

Commit 02d8a2d

Browse files
authored
refactor(sambanova): Drop instructor usage. (#455)
Native `response_format` is supposed to work with the openai-compatible API: https://docs.sambanova.ai/docs/en/api-reference/endpoints/chat#function-calling-parameters
1 parent 0155d06 commit 02d8a2d

File tree

3 files changed

+8
-57
lines changed

(duplicate file-change summary removed — same as above)

pyproject.toml

Lines changed: 8 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -72,10 +72,6 @@ together = [
7272
"together",
7373
]
7474

75-
sambanova = [
76-
"instructor",
77-
]
78-
7975
ollama = [
8076
"ollama>=0.5.1"
8177
]
@@ -94,19 +90,20 @@ sagemaker = [
9490

9591
# These providers don't require any additional dependencies, but are included for completeness.
9692
azureopenai = []
97-
moonshot = []
98-
nebius = []
9993
databricks = []
10094
deepseek = []
10195
fireworks = []
10296
inception = []
103-
openai = []
104-
portkey = []
105-
openrouter = []
106-
lmstudio = []
10797
llama = []
108-
llamafile = []
10998
llamacpp = []
99+
llamafile = []
100+
lmstudio = []
101+
moonshot = []
102+
nebius = []
103+
openai = []
104+
openrouter = []
105+
portkey = []
106+
sambanova = []
110107

111108
[project.urls]
112109
Documentation = "https://mozilla-ai.github.io/any-llm/"
[filename missing from capture — presumably the SambaNova provider module; verify against the repository]

Lines changed: 0 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,4 @@
1-
from collections.abc import AsyncIterator
2-
from typing import Any, cast
3-
4-
from pydantic import BaseModel
5-
61
from any_llm.providers.openai.base import BaseOpenAIProvider
7-
from any_llm.types.completion import ChatCompletion, ChatCompletionChunk, CompletionParams
8-
from any_llm.utils.instructor import _convert_instructor_response
9-
10-
MISSING_PACKAGES_ERROR = None
11-
try:
12-
import instructor
13-
14-
except ImportError as e:
15-
MISSING_PACKAGES_ERROR = e
162

173

184
class SambanovaProvider(BaseOpenAIProvider):
@@ -21,35 +7,4 @@ class SambanovaProvider(BaseOpenAIProvider):
217
PROVIDER_NAME = "sambanova"
228
PROVIDER_DOCUMENTATION_URL = "https://sambanova.ai/"
239

24-
MISSING_PACKAGES_ERROR = MISSING_PACKAGES_ERROR
2510
SUPPORTS_COMPLETION_PDF = False
26-
27-
async def _acompletion(
28-
self, params: CompletionParams, **kwargs: Any
29-
) -> ChatCompletion | AsyncIterator[ChatCompletionChunk]:
30-
"""Make the API call to SambaNova service with instructor for structured output."""
31-
32-
if params.reasoning_effort == "auto":
33-
params.reasoning_effort = None
34-
35-
if params.response_format:
36-
instructor_client = instructor.from_openai(self.client)
37-
if not isinstance(params.response_format, type) or not issubclass(params.response_format, BaseModel):
38-
msg = "response_format must be a pydantic model"
39-
raise ValueError(msg)
40-
response = await instructor_client.chat.completions.create(
41-
model=params.model_id,
42-
messages=cast("Any", params.messages),
43-
response_model=params.response_format,
44-
**params.model_dump(exclude_none=True, exclude={"model_id", "messages", "response_format"}),
45-
**kwargs,
46-
)
47-
return _convert_instructor_response(response, params.model_id, self.PROVIDER_NAME)
48-
return self._convert_completion_response_async(
49-
await self.client.chat.completions.create(
50-
model=params.model_id,
51-
messages=cast("Any", params.messages),
52-
**params.model_dump(exclude_none=True, exclude={"model_id", "messages"}),
53-
**kwargs,
54-
)
55-
)

tests/unit/test_provider.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,6 @@ def test_providers_raise_MissingApiKeyError(provider: LLMProvider) -> None:
166166
("huggingface", "huggingface_hub"),
167167
("mistral", "mistralai"),
168168
("ollama", "ollama"),
169-
("sambanova", "instructor"),
170169
("together", "together"),
171170
("voyage", "voyageai"),
172171
("watsonx", "ibm_watsonx_ai"),

0 commit comments

Comments (0)