|
4 | 4 | from typing import Optional, Union |
5 | 5 |
|
6 | 6 | from .src.audio import Audio |
| 7 | +from .src.responses import Responses |
7 | 8 | from .src.chat import Chat |
8 | 9 | from .src.completions import Completions |
9 | 10 | from .src.detokenize import Detokenize |
|
16 | 17 | from .src.toxicity import Toxicity |
17 | 18 | from .src.pii import Pii |
18 | 19 | from .src.injection import Injection |
| 20 | +from .src.mcp_servers import MCPServers |
| 21 | +from .src.mcp_tools import MCPTools |
19 | 22 | from .src.models import Models |
20 | 23 | from .version import __version__ |
21 | 24 |
|
22 | 25 | __all__ = [ |
23 | | - "PredictionGuard", "Chat", "Completions", "Embeddings", |
24 | | - "Audio", "Documents", "Rerank", "Tokenize", "Translate", |
25 | | - "Detokenize", "Factuality", "Toxicity", "Pii", "Injection", |
| 26 | + "PredictionGuard", "Responses", "Chat", "Completions", "Embeddings", |
| 27 | + "Audio", "Documents", "Rerank", "Tokenize", "Translate", "Detokenize", |
| 28 | + "Factuality", "Toxicity", "Pii", "Injection", "MCPServers", "MCPTools", |
26 | 29 | "Models" |
27 | 30 | ] |
28 | 31 |
|
@@ -81,11 +84,14 @@ def __init__( |
81 | 84 | self._connect_client() |
82 | 85 |
|
83 | 86 | # Pass Prediction Guard class variables to inner classes |
| 87 | + self.responses: Responses = Responses(self.api_key, self.url, self.timeout) |
| 88 | +    """Responses allows for the usage of LLMs intended for agentic use cases.""" |
| 89 | + |
84 | 90 | self.chat: Chat = Chat(self.api_key, self.url, self.timeout) |
85 | | - """Chat generates chat completions based on a conversation history""" |
| 91 | + """Chat generates chat completions based on a conversation history.""" |
86 | 92 |
|
87 | 93 | self.completions: Completions = Completions(self.api_key, self.url, self.timeout) |
88 | | - """Completions generates text completions based on the provided input""" |
| 94 | + """Completions generates text completions based on the provided input.""" |
89 | 95 |
|
90 | 96 | self.embeddings: Embeddings = Embeddings(self.api_key, self.url, self.timeout) |
91 | 97 | """Embeddings generates vector embeddings for the provided input.""" |
@@ -120,6 +126,12 @@ def __init__( |
120 | 126 | self.detokenize: Detokenize = Detokenize(self.api_key, self.url, self.timeout) |
121 | 127 | """Detokenize generates text from input tokens.""" |
122 | 128 |
|
| 129 | + self.mcp_servers: MCPServers = MCPServers(self.api_key, self.url, self.timeout) |
| 130 | + """MCPServers lists all the MCP servers available in the Prediction Guard API.""" |
| 131 | + |
| 132 | + self.mcp_tools: MCPTools = MCPTools(self.api_key, self.url, self.timeout) |
| 133 | + """MCPTools lists all the MCP tools available in the Prediction Guard API.""" |
| 134 | + |
123 | 135 | self.models: Models = Models(self.api_key, self.url, self.timeout) |
124 | 136 | """Models lists all of the models available in the Prediction Guard API.""" |
125 | 137 |
|
|
0 commit comments