Commit 30a354b
fix(inception): Update SUPPORTED properties. (#502)
Add unit test for unsupported `response_format`.
1 parent a0e46d0 · commit 30a354b

File tree: 3 files changed, +33 -1 lines changed

src/any_llm/providers/inception/inception.py

Lines changed: 15 additions & 0 deletions

@@ -1,4 +1,8 @@
+from typing import Any
+
+from any_llm.exceptions import UnsupportedParameterError
 from any_llm.providers.openai.base import BaseOpenAIProvider
+from any_llm.types.completion import CompletionParams
 
 
 class InceptionProvider(BaseOpenAIProvider):
@@ -8,3 +12,14 @@ class InceptionProvider(BaseOpenAIProvider):
     PROVIDER_DOCUMENTATION_URL = "https://inceptionlabs.ai/"
 
     SUPPORTS_EMBEDDING = False  # Inception doesn't host an embedding model
+    SUPPORTS_COMPLETION_IMAGE = False
+    SUPPORTS_COMPLETION_PDF = False
+
+    @staticmethod
+    def _convert_completion_params(params: CompletionParams, **kwargs: Any) -> dict[str, Any]:
+        if params.response_format is not None:
+            param = "response_format"
+            raise UnsupportedParameterError(param, "inception")
+        converted_params = params.model_dump(exclude_none=True, exclude={"model_id", "messages"})
+        converted_params.update(kwargs)
+        return converted_params
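For context, a minimal sketch (not part of the commit) of how the new guard behaves. Only model_id, messages, and response_format are CompletionParams fields confirmed by this diff; the temperature field below is an assumption, shown only to illustrate the conversion.

    from pydantic import BaseModel

    from any_llm.exceptions import UnsupportedParameterError
    from any_llm.providers.inception.inception import InceptionProvider
    from any_llm.types.completion import CompletionParams

    params = CompletionParams(
        model_id="mercury",
        messages=[{"role": "user", "content": "Hello"}],
        temperature=0.2,  # assumed field, for illustration only
    )
    # model_id and messages are excluded; None-valued fields are dropped.
    print(InceptionProvider._convert_completion_params(params))
    # e.g. {'temperature': 0.2}, plus any other non-None fields with defaults

    class AnswerModel(BaseModel):
        answer: str

    try:
        InceptionProvider._convert_completion_params(
            CompletionParams(
                model_id="mercury",
                messages=[{"role": "user", "content": "Hello"}],
                response_format=AnswerModel,  # any non-None value trips the guard
            )
        )
    except UnsupportedParameterError as err:
        print(err)  # 'response_format' is not supported for inception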

tests/conftest.py

Lines changed: 1 addition & 1 deletion

@@ -42,7 +42,7 @@ def provider_model_map() -> dict[LLMProvider, str]:
         LLMProvider.SAMBANOVA: "Meta-Llama-3.1-8B-Instruct",
         LLMProvider.TOGETHER: "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
         LLMProvider.XAI: "grok-3-mini-latest",
-        LLMProvider.INCEPTION: "inception-3-70b-instruct",
+        LLMProvider.INCEPTION: "mercury",
         LLMProvider.NEBIUS: "openai/gpt-oss-20b",
         LLMProvider.OLLAMA: "llama3.2:1b",
         LLMProvider.LLAMAFILE: "N/A",
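A hypothetical consumer of this mapping, showing why the model-ID swap matters for integration runs. The provider_model_map name and the LLMProvider enum come from the diff above; the test body is illustrative only.

    def test_inception_model_id(provider_model_map: dict[LLMProvider, str]) -> None:
        # Integration tests that look up the Inception entry now target "mercury".
        assert provider_model_map[LLMProvider.INCEPTION] == "mercury"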
New test file (path not shown in this capture)

Lines changed: 17 additions & 0 deletions

@@ -0,0 +1,17 @@
+import pytest
+from pydantic import BaseModel
+
+from any_llm.exceptions import UnsupportedParameterError
+from any_llm.providers.inception.inception import InceptionProvider
+from any_llm.types.completion import CompletionParams
+
+
+def test_inception_unsupported_response_format() -> None:
+    class ResponseFormatModel(BaseModel):
+        response: str
+
+    params = CompletionParams(
+        model_id="mercury", messages=[{"role": "user", "content": "Hello"}], response_format=ResponseFormatModel
+    )
+    with pytest.raises(UnsupportedParameterError, match="'response_format' is not supported for inception"):
+        InceptionProvider._convert_completion_params(params)
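To run just this test locally, pytest's -k filter avoids needing the file path (which this capture doesn't show):

    pytest -k test_inception_unsupported_response_format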
