import pydantic
import pytest
from google.generativeai.types import ContentDict
-from mistralai.models import ChatCompletionChoice, ChatCompletionResponse, UsageInfo
from openai.types import CompletionUsage
from openai.types.chat import ChatCompletion, ChatCompletionMessage
from openai.types.chat.chat_completion import Choice
)


-DEMO_MISTRAL_RESPONSE = ChatCompletionResponse(
-    id="2d62260a7a354e02922a4f6ad36930d3",
-    object="chat.completion",
-    created=1630000000,
-    model="mistral-large",
-    choices=[
-        ChatCompletionChoice(
-            index=0,
-            message={"role": "assistant", "content": "This is a test!"},
-            finish_reason="stop",
-        )
-    ],
-    usage=UsageInfo(prompt_tokens=9, total_tokens=89, completion_tokens=80),
-)
-
-
def test_llm_complete_message():
    client = Mock()
    client.chat.completions.create.return_value = DEMO_OPENAI_RESPONSE
@@ -67,8 +50,25 @@ def test_llm_complete_message():

@pytest.mark.skipif(not PYDANTIC_V2, reason="Mistral raise an error with pydantic < 2")
def test_mistral_client():
+    from mistralai.models import ChatCompletionChoice, ChatCompletionResponse, UsageInfo
+
+    demo_response = ChatCompletionResponse(
+        id="2d62260a7a354e02922a4f6ad36930d3",
+        object="chat.completion",
+        created=1630000000,
+        model="mistral-large",
+        choices=[
+            ChatCompletionChoice(
+                index=0,
+                message={"role": "assistant", "content": "This is a test!"},
+                finish_reason="stop",
+            )
+        ],
+        usage=UsageInfo(prompt_tokens=9, total_tokens=89, completion_tokens=80),
+    )
+
    client = Mock()
-    client.chat.complete.return_value = DEMO_MISTRAL_RESPONSE
+    client.chat.complete.return_value = demo_response

    from giskard.llm.client.mistral import MistralClient

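For context, a minimal sketch (not part of this commit) of what the stubbed return value buys the test: the Mock ignores whatever arguments it receives and hands back the canned Mistral response built inside test_mistral_client, so the call below uses placeholder arguments rather than the real giskard invocation.

    # Sketch only: the Mock returns demo_response regardless of the arguments passed.
    reply = client.chat.complete(
        model="mistral-large",
        messages=[{"role": "user", "content": "Hello"}],  # placeholder arguments
    )
    assert reply is demo_response
    assert reply.choices[0].message.content == "This is a test!"
    assert reply.usage.total_tokens == 89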