src/any_llm/providers/fireworks/fireworks.py (22 additions, 2 deletions)
@@ -10,8 +10,10 @@
from any_llm.types.completion import ChatCompletionChunk, ChatCompletion
from any_llm.provider import Provider
from any_llm.providers.helpers import create_completion_from_response
from any_llm.providers.fireworks.utils import _create_openai_chunk_from_fireworks_chunk
from any_llm.providers.fireworks.utils import _create_openai_chunk_from_fireworks_chunk, _create_response_with_output_text
from any_llm.types.responses import Response, ResponseStreamEvent

from openai import Stream, OpenAI

class FireworksProvider(Provider):
PROVIDER_NAME = "Fireworks"
@@ -20,7 +22,7 @@ class FireworksProvider(Provider):

SUPPORTS_COMPLETION_STREAMING = True
SUPPORTS_COMPLETION = True
SUPPORTS_RESPONSES = False
SUPPORTS_RESPONSES = True
SUPPORTS_COMPLETION_REASONING = False
SUPPORTS_EMBEDDING = False

@@ -74,3 +76,21 @@ def _make_api_call(
provider_name="Fireworks",
model=model,
)

    def responses(self, model, input_data, **kwargs) -> Response | Iterator[ResponseStreamEvent]:
        """Call the Fireworks Responses API and return the response wrapped to expose an output_text property."""
        client = OpenAI(
            base_url="https://api.fireworks.ai/inference/v1",
            api_key=self.config.api_key,
        )

        response = client.responses.create(
            model=f"accounts/fireworks/models/{model}",
            input=input_data,
            **kwargs,
        )

        if not isinstance(response, (Response, Stream)):
            raise ValueError(f"Responses API returned an unexpected type: {type(response)}")

        return _create_response_with_output_text(response)
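For reference, a minimal standalone sketch of the request the new responses() method issues, assuming FIREWORKS_API_KEY is set in the environment; the model id is illustrative and not part of this PR. Note the raw response text may still contain the model's "<think>...</think>" reasoning, which the ResponseWrapper below strips.

```python
import os

from openai import OpenAI

# Illustrative only: mirrors the call made inside FireworksProvider.responses().
client = OpenAI(
    base_url="https://api.fireworks.ai/inference/v1",
    api_key=os.environ["FIREWORKS_API_KEY"],  # assumed to be set
)
response = client.responses.create(
    model="accounts/fireworks/models/llama-v3p1-8b-instruct",  # hypothetical model id
    input="Say hello in one sentence.",
)
print(response.output[-1].content[0].text)  # may include a <think>...</think> prefix
```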
src/any_llm/providers/fireworks/utils.py (24 additions, 0 deletions)
@@ -54,3 +54,27 @@ def _create_openai_chunk_from_fireworks_chunk(fireworks_chunk: Any) -> ChatCompletionChunk:
object="chat.completion.chunk",
usage=usage,
)

class ResponseWrapper:
    """Wrapper class to add an output_text property to Fireworks Response objects."""

    def __init__(self, response):
        self._response = response
        self._output_text = None

    def __getattr__(self, name):
        # Delegate any other attribute access to the wrapped response.
        return getattr(self._response, name)

    @property
    def output_text(self):
        # Lazily extract the text of the last output item, stripping any
        # "<think>...</think>" reasoning block the model may have emitted.
        if self._output_text is None and hasattr(self._response, "output") and self._response.output:
            try:
                raw_output = self._response.output[-1].content[0].text
                self._output_text = raw_output.split("</think>")[-1].strip()
            except (IndexError, AttributeError):
                self._output_text = ""
        return self._output_text or ""


def _create_response_with_output_text(fireworks_response: Any) -> Any:
"""Wrap a Fireworks response to add output_text property."""
return ResponseWrapper(fireworks_response)