@@ -11,60 +11,78 @@ This guide focuses primarily on configuring and using various LLM clients suppor
## OpenAI GPT-4 Client Setup
1212
1313``` python
14- import giskard
1514import os
15+ import giskard
1616from giskard.llm.client.openai import OpenAIClient
1717
18+ # Set the OpenAI API key
1819os.environ[" OPENAI_API_KEY" ] = " sk-…"
1920
21+ # Create a giskard OpenAI client
22+ openai_client = OpenAIClient(model = " gpt-4o" )
23+
24+ # Set the default client
2025giskard.llm.set_llm_api(" openai" )
21- oc = OpenAIClient(model = " gpt-4-turbo-preview" )
22- giskard.llm.set_default_client(oc)
26+ giskard.llm.set_default_client(openai_client)
2327```
2428
## Azure OpenAI Client Setup
2630
2731``` python
2832import os
29- from giskard.llm import set_llm_model
30- from giskard.llm.embeddings.openai import set_embedding_model
33+ import giskard
3134
35+ # Set the Azure OpenAI API key and endpoint
3236os.environ[' AZURE_OPENAI_API_KEY' ] = ' ...'
3337os.environ[' AZURE_OPENAI_ENDPOINT' ] = ' https://xxx.openai.azure.com'
3438os.environ[' OPENAI_API_VERSION' ] = ' 2023-07-01-preview'
3539
36-
3740# You'll need to provide the name of the model that you've deployed
3841# Beware, the model provided must be capable of using function calls
39- set_llm_model(' my-gpt-4-model' )
40- set_embedding_model(' my-embedding-model' ) # Optional
42+ giskard.llm. set_llm_model(' my-gpt-4-model' )
43+ giskard.llm.embeddings. set_embedding_model(' my-embedding-model' )
4144```
4245
## Mistral Client Setup
4447
4548``` python
4649import os
50+ import giskard
4751from giskard.llm.client.mistral import MistralClient
4852
49- os.environ[" MISTRAL_API_KEY" ] = " sk-…"
53+ # Set the Mistral API key
54+ os.environ[" MISTRAL_API_KEY" ] = " …"
5055
51- mc = MistralClient()
52- giskard.llm.set_default_client(mc)
56+ # Create a giskard Mistral client
57+ mistral_client = MistralClient()
58+
59+ # Set the default client
60+ giskard.llm.set_default_client(mistral_client)
61+
62+ # You may also want to set the default embedding model
63+ # Check the Custom Client Setup section for more details
5364```
5465
## Ollama Client Setup
5667
The Ollama setup involves configuring an OpenAI client customized for the Ollama API:
5869
5970``` python
71+ import giskard
6072from openai import OpenAI
6173from giskard.llm.client.openai import OpenAIClient
62- from giskard.llm.client.mistral import MistralClient
74+ from giskard.llm.embeddings.openai import OpenAIEmbedding
6375
64- # Setup the Ollama client with API key and base URL
76+ # Setup the OpenAI client with API key and base URL for Ollama
6577_client = OpenAI(base_url = " http://localhost:11434/v1/" , api_key = " ollama" )
66- oc = OpenAIClient(model = " gemma:2b" , client = _client)
67- giskard.llm.set_default_client(oc)
78+
79+ # Wrap the original OpenAI client with giskard OpenAI client and embedding
80+ llm_client = OpenAIClient(model = " llama3.2" , client = _client)
81+ embed_client = OpenAIEmbedding(model = " nomic-embed-text" , client = _client)
82+
83+ # Set the default client and embedding
84+ giskard.llm.set_default_client(llm_client)
85+ giskard.llm.embeddings.set_default_embedding(embed_client)
6886```
6987
## Claude 3 Client Setup
@@ -78,28 +96,41 @@ import giskard
7896
7997from giskard.llm.client.bedrock import ClaudeBedrockClient
8098from giskard.llm.embeddings.bedrock import BedrockEmbedding
81- from giskard.llm.embeddings import set_default_embedding
8299
100+ # Create a Bedrock client
83101bedrock_runtime = boto3.client(" bedrock-runtime" , region_name = os.environ[" AWS_DEFAULT_REGION" ])
102+
103+ # Wrap the Bedrock client with giskard Bedrock client and embedding
84104claude_client = ClaudeBedrockClient(bedrock_runtime, model = " anthropic.claude-3-haiku-20240307-v1:0" )
85105embed_client = BedrockEmbedding(bedrock_runtime, model = " amazon.titan-embed-text-v1" )
106+
107+ # Set the default client and embedding
86108giskard.llm.set_default_client(claude_client)
87- set_default_embedding(embed_client)
109+ giskard.llm.embeddings. set_default_embedding(embed_client)
88110```
89111
## Gemini Client Setup
91113
92114``` python
93115import os
94116import giskard
95-
96117import google.generativeai as genai
97-
98118from giskard.llm.client.gemini import GeminiClient
99119
120+ # Set the Gemini API key
121+ os.environ[" GEMINI_API_KEY" ] = " …"
122+
123+ # Configure the Gemini API
100124genai.configure(api_key = os.environ[" GEMINI_API_KEY" ])
101125
102- giskard.llm.set_default_client(GeminiClient())
126+ # Create a giskard Gemini client
127+ gemini_client = GeminiClient()
128+
129+ # Set the default client
130+ giskard.llm.set_default_client(gemini_client)
131+
132+ # You may also want to set the default embedding model
133+ # Check the Custom Client Setup section for more details
103134```
104135
## Custom Client Setup
@@ -110,7 +141,7 @@ from typing import Sequence, Optional
110141from giskard.llm.client import set_default_client
111142from giskard.llm.client.base import LLMClient, ChatMessage
112143
113-
144+ # Create a custom client by extending the LLMClient class
114145class MyLLMClient (LLMClient ):
115146 def __init__ (self , my_client ):
116147 self ._client = my_client
@@ -155,7 +186,17 @@ class MyLLMClient(LLMClient):
155186
156187 return ChatMessage(role = " assistant" , message = data[" completion" ])
157188
158- set_default_client(MyLLMClient())
189+ # Create an instance of the custom client
190+ llm_client = MyLLMClient()
191+
192+ # Set the default client
193+ set_default_client(llm_client)
194+
195+ # It's also possible to create a custom embedding class extending BaseEmbedding
196+ # Or you can use FastEmbed for a pre-built embedding model:
197+ from giskard.llm.embeddings.fastembed import try_get_fastembed_embeddings
198+ embed_client = try_get_fastembed_embeddings()
199+ giskard.llm.embeddings.set_default_embedding(embed_client)
159200```
160201
If you run into any issues configuring the LLM client, don't hesitate to [ask us on Discord](https://discord.com/invite/ABvfpbu69R) or open a new issue on [our GitHub repo](https://github.com/Giskard-AI/giskard).
0 commit comments