Commit 3f1e630

refactor(sagemaker): Drop instructor usage. (#459)
1 parent dcf0935 · commit 3f1e630

File tree

1 file changed (+3, -26 lines)

src/any_llm/providers/sagemaker/sagemaker.py

Lines changed: 3 additions & 26 deletions
@@ -6,15 +6,12 @@
 from collections.abc import AsyncIterator, Callable, Iterator, Sequence
 from typing import Any

-from pydantic import BaseModel
-
 from any_llm.any_llm import AnyLLM
 from any_llm.config import ClientConfig
-from any_llm.exceptions import MissingApiKeyError
+from any_llm.exceptions import MissingApiKeyError, UnsupportedParameterError
 from any_llm.logging import logger
 from any_llm.types.completion import ChatCompletion, ChatCompletionChunk, CompletionParams, CreateEmbeddingResponse
 from any_llm.types.model import Model
-from any_llm.utils.instructor import _convert_instructor_response

 MISSING_PACKAGES_ERROR = None
 try:
@@ -137,28 +134,8 @@ def _completion(
         completion_kwargs = self._convert_completion_params(params, **kwargs)

         if params.response_format:
-            if params.stream:
-                msg = "stream is not supported for response_format"
-                raise ValueError(msg)
-
-            if not isinstance(params.response_format, type) or not issubclass(params.response_format, BaseModel):
-                msg = "response_format must be a pydantic model"
-                raise ValueError(msg)
-
-            response = self.client.invoke_endpoint(
-                EndpointName=params.model_id,
-                Body=json.dumps(completion_kwargs),
-                ContentType="application/json",
-            )
-
-            response_body = json.loads(response["Body"].read())
-
-            try:
-                structured_response = params.response_format.model_validate(response_body)
-                return _convert_instructor_response(structured_response, params.model_id, "aws")
-            except (ValueError, TypeError) as e:
-                logger.warning("Failed to parse structured response: %s", e)
-                return self._convert_completion_response({"model": params.model_id, **response_body})
+            param = "response_format"
+            raise UnsupportedParameterError(param, "sagemaker")

         if params.stream:
             response = self.client.invoke_endpoint_with_response_stream(
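
In short, the commit removes the SageMaker provider's pydantic-based structured-output path: a request that sets response_format now raises UnsupportedParameterError("response_format", "sagemaker") before any endpoint call, instead of invoking the endpoint, validating the JSON body against the pydantic model, and falling back to an unstructured completion on failure. The sketch below mirrors the new guard with stand-in types so it runs without any_llm installed; the real classes live in any_llm.exceptions and any_llm.types.completion, and their exact fields may differ.

# Standalone sketch of the behavior after this commit (not code from the
# repo). The stand-ins only model what the diff shows: the real
# UnsupportedParameterError is constructed as (param, provider), and
# CompletionParams carries at least model_id, response_format and stream.
from dataclasses import dataclass
from typing import Any


class UnsupportedParameterError(Exception):
    """Stand-in for any_llm.exceptions.UnsupportedParameterError."""

    def __init__(self, param: str, provider: str) -> None:
        super().__init__(f"{param} is not supported by the {provider} provider")


@dataclass
class CompletionParams:
    """Stand-in with only the fields the diff touches."""

    model_id: str
    response_format: Any = None
    stream: bool = False


def sagemaker_completion(params: CompletionParams) -> str:
    # New behavior: fail fast on response_format instead of calling
    # invoke_endpoint and validating the response body with pydantic.
    if params.response_format:
        param = "response_format"
        raise UnsupportedParameterError(param, "sagemaker")
    return f"plain completion from {params.model_id}"


try:
    # Any truthy response_format triggers the guard.
    sagemaker_completion(CompletionParams(model_id="my-endpoint", response_format=dict))
except UnsupportedParameterError as exc:
    # Callers that need structured output must now clear response_format
    # (or use a provider that supports it) and parse the text themselves.
    print(exc)

The trade-off is visible in the removed hunk: the old path could silently degrade to an unstructured completion when validation failed (only a warning was logged), whereas the new guard makes the limitation explicit at call time.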
