8 changes: 3 additions & 5 deletions gpt_engineer/core/ai.py
@@ -11,14 +11,15 @@

 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
 from langchain.chat_models.base import BaseChatModel
+from langchain_openai import ChatOpenAI
 from langchain.schema import (
     AIMessage,
     HumanMessage,
     SystemMessage,
     messages_from_dict,
     messages_to_dict,
 )
-from langchain_community.chat_models import AzureChatOpenAI, ChatOpenAI
+from langchain_community.chat_models import AzureChatOpenAI

 from gpt_engineer.core.token_usage import TokenUsageLog

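With langchain 0.1, `ChatOpenAI` moves into the `langchain_openai` partner package, while `AzureChatOpenAI` remains importable from `langchain_community.chat_models`. A minimal sketch of constructing a streaming chat model with the new import (the model name and temperature below are illustrative, not values taken from this diff):

```python
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain_openai import ChatOpenAI

# Illustrative values; the real code reads these from the AI class attributes.
llm = ChatOpenAI(
    model="gpt-4",
    temperature=0.1,
    streaming=True,
    callbacks=[StreamingStdOutCallbackHandler()],
)
```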
@@ -125,9 +126,7 @@ def next(

         return messages

-    @backoff.on_exception(
-        backoff.expo, openai.error.RateLimitError, max_tries=7, max_time=45
-    )
+    @backoff.on_exception(backoff.expo, openai.RateLimitError, max_tries=7, max_time=45)
     def backoff_inference(self, messages):
         """
         Perform inference using the language model while implementing an exponential backoff strategy.
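In openai>=1.0.0 the rate-limit exception is exposed at the top level as `openai.RateLimitError`; the old `openai.error` module was removed, so the decorator must reference the new name. A minimal sketch of the same backoff pattern against the 1.x client (the function and model name here are illustrative):

```python
import backoff
import openai

client = openai.OpenAI()  # reads OPENAI_API_KEY from the environment

@backoff.on_exception(backoff.expo, openai.RateLimitError, max_tries=7, max_time=45)
def chat_with_retry(messages):
    # Retries with exponential backoff whenever the API raises a rate-limit error.
    return client.chat.completions.create(model="gpt-4", messages=messages)
```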
@@ -234,7 +233,6 @@ def _create_chat_model(self) -> BaseChatModel:
             model=self.model_name,
             temperature=self.temperature,
             streaming=self.streaming,
-            client=openai.ChatCompletion,
             callbacks=[StreamingStdOutCallbackHandler()],
         )

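The explicit `client=openai.ChatCompletion` argument is dropped because `openai.ChatCompletion` no longer exists in openai>=1.0.0; the `langchain_openai` `ChatOpenAI` wrapper constructs its own client internally. For direct access to the SDK, the 1.x pattern looks roughly like this (a sketch, not part of this diff):

```python
from openai import OpenAI

client = OpenAI()  # replaces the module-level openai.ChatCompletion interface
response = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)
```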
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -19,15 +19,16 @@ build-backend = "poetry.core.masonry.api"

 [tool.poetry.dependencies]
 python = ">=3.10,<3.12" # updated for llama-index dependency
-openai = "0.28"
+openai = ">=1.0.0"
 termcolor = "2.3.0"
 typer = ">=0.3.2"
 rudder-sdk-python = ">=2.0.2"
 dataclasses-json = "0.5.7"
 tiktoken = ">=0.0.4"
 tabulate = "0.9.0"
 python-dotenv = ">=0.21.0"
-langchain = ">=0.0.335"
+langchain = ">=0.1.0"
+langchain_openai = ">=0.0.2"
 toml = ">=0.10.2"

 [tool.poetry.group.dev.dependencies]
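The new pins move openai and langchain past their 1.x / 0.1 API breaks and pull in the `langchain_openai` partner package. A quick way to confirm a resolved environment satisfies the new floors (assuming the PyPI distribution names match the packages pinned above):

```python
from importlib.metadata import version

for dist in ("openai", "langchain", "langchain-openai"):
    # Prints the installed version; expect openai >= 1.0.0 and langchain >= 0.1.0.
    print(dist, version(dist))
```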