@@ -4,9 +4,6 @@
 import pydantic
 import pytest
 from google.generativeai.types import ContentDict
-from mistralai.models.chat_completion import ChatCompletionResponse, ChatCompletionResponseChoice
-from mistralai.models.chat_completion import ChatMessage as MistralChatMessage
-from mistralai.models.chat_completion import FinishReason, UsageInfo
 from openai.types import CompletionUsage
 from openai.types.chat import ChatCompletion, ChatCompletionMessage
 from openai.types.chat.chat_completion import Choice
@@ -35,22 +32,6 @@
 )
 
 
-DEMO_MISTRAL_RESPONSE = ChatCompletionResponse(
-    id="2d62260a7a354e02922a4f6ad36930d3",
-    object="chat.completion",
-    created=1630000000,
-    model="mistral-large",
-    choices=[
-        ChatCompletionResponseChoice(
-            index=0,
-            message=MistralChatMessage(role="assistant", content="This is a test!", name=None, tool_calls=None),
-            finish_reason=FinishReason.stop,
-        )
-    ],
-    usage=UsageInfo(prompt_tokens=9, total_tokens=89, completion_tokens=80),
-)
-
-
 def test_llm_complete_message():
     client = Mock()
     client.chat.completions.create.return_value = DEMO_OPENAI_RESPONSE
@@ -69,19 +50,36 @@ def test_llm_complete_message():
 
 @pytest.mark.skipif(not PYDANTIC_V2, reason="Mistral raises an error with pydantic < 2")
 def test_mistral_client():
+    from mistralai.models import ChatCompletionChoice, ChatCompletionResponse, UsageInfo
+
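+    # Canned response matching the shape the v1 SDK returns from chat.complete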
+    demo_response = ChatCompletionResponse(
+        id="2d62260a7a354e02922a4f6ad36930d3",
+        object="chat.completion",
+        created=1630000000,
+        model="mistral-large",
+        choices=[
+            ChatCompletionChoice(
+                index=0,
+                message={"role": "assistant", "content": "This is a test!"},
+                finish_reason="stop",
+            )
+        ],
+        usage=UsageInfo(prompt_tokens=9, total_tokens=89, completion_tokens=80),
+    )
+
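+    # mistralai v1 exposes chat completions via client.chat.complete(...)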
     client = Mock()
-    client.chat.return_value = DEMO_MISTRAL_RESPONSE
+    client.chat.complete.return_value = demo_response
 
     from giskard.llm.client.mistral import MistralClient
 
     res = MistralClient(model="mistral-large", client=client).complete(
         [ChatMessage(role="user", content="Hello")], temperature=0.11, max_tokens=12
     )
 
-    client.chat.assert_called_once()
-    assert client.chat.call_args[1]["messages"] == [MistralChatMessage(role="user", content="Hello")]
-    assert client.chat.call_args[1]["temperature"] == 0.11
-    assert client.chat.call_args[1]["max_tokens"] == 12
+    client.chat.complete.assert_called_once()
+    assert client.chat.complete.call_args[1]["messages"] == [{"role": "user", "content": "Hello"}]
+    assert client.chat.complete.call_args[1]["temperature"] == 0.11
+    assert client.chat.complete.call_args[1]["max_tokens"] == 12
 
     assert isinstance(res, ChatMessage)
     assert res.content == "This is a test!"
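
For reference, here is a minimal sketch of the v1-style call that this test mocks. It assumes the mistralai >= 1.0 SDK; the model name and API-key handling are illustrative, not taken from this PR:

    import os

    from mistralai import Mistral

    client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

    # v1 replaces client.chat(...) with client.chat.complete(...), and messages
    # are plain dicts rather than MistralChatMessage objects
    response = client.chat.complete(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": "Hello"}],
        temperature=0.11,
        max_tokens=12,
    )
    print(response.choices[0].message.content)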