Skip to content
Merged
Show file tree
Hide file tree
Changes from 26 commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
4e39ee1
Added langchain agent to talk to Giskard models
kevinmessiaen Aug 17, 2023
773d58a
Added talk method and made dataset optional
kevinmessiaen Aug 17, 2023
cf48141
Added optional query method and dynamic llm param
kevinmessiaen Aug 17, 2023
8de0de5
WIP: doc to talk to your model
kevinmessiaen Aug 17, 2023
e7facdd
Added possibility to get model information based on the scan result
kevinmessiaen Aug 17, 2023
07e80e9
Fixed issue with doc and improved reliability descriptions
kevinmessiaen Aug 17, 2023
11e586a
Merge branch 'main' into poc/talk-to-my-ml
mattbit Aug 18, 2023
e3e06c1
Code improvement
kevinmessiaen Aug 21, 2023
cc92de6
Reduce boilerplate to get wrapped titanic model and dataset
kevinmessiaen Aug 21, 2023
0ea60d1
Added small description in the doc
kevinmessiaen Aug 21, 2023
7409152
Externalized LLM configuration
kevinmessiaen Aug 21, 2023
d272696
Added export to set_default_llm
kevinmessiaen Aug 21, 2023
15918bf
Model.talk is now usable "out of the box"
kevinmessiaen Aug 22, 2023
11fd325
Deleted test notebook
kevinmessiaen Aug 22, 2023
f57b074
Improved llm config
kevinmessiaen Aug 22, 2023
1602a27
Fixed doc
kevinmessiaen Aug 22, 2023
1677071
Fixed doc
kevinmessiaen Aug 22, 2023
1f9c1c2
Added test to predict tool
kevinmessiaen Aug 23, 2023
2ee46bc
Added test for scan tools
kevinmessiaen Aug 23, 2023
40dd8ab
Added test to create llm_agent
kevinmessiaen Aug 23, 2023
d42bab0
Added talk method
kevinmessiaen Aug 23, 2023
c84a749
Added talk method
kevinmessiaen Aug 23, 2023
f52baf7
Added talk method
kevinmessiaen Aug 23, 2023
d16491b
Added talk method
kevinmessiaen Aug 23, 2023
cb40535
Removed unused code
kevinmessiaen Aug 23, 2023
1166c47
Add test for error without llm nor api key
kevinmessiaen Aug 23, 2023
e1a381e
Removed deleted export on demo module
kevinmessiaen Aug 24, 2023
ed1e51c
Added examples with scan
kevinmessiaen Aug 24, 2023
384a083
Merge branch 'main' into poc/talk-to-my-ml
kevinmessiaen Sep 4, 2023
a190b5f
Merge branch 'main' into poc/talk-to-my-ml
Sep 6, 2023
0429856
clarify text and examples
luca-martial Sep 6, 2023
84c9c5a
Added documentation and fixed scan report
kevinmessiaen Sep 7, 2023
4616206
Better generated response thanks to scan result
kevinmessiaen Sep 7, 2023
f1104f7
Merge branch 'main' into poc/talk-to-my-ml
kevinmessiaen Sep 13, 2023
8d798d3
Added test for when Python version < 3.9
kevinmessiaen Sep 14, 2023
a662789
Fixed condition
kevinmessiaen Sep 14, 2023
c2b18ea
Added condition for tests
kevinmessiaen Sep 14, 2023
8351d1e
Fixed error
kevinmessiaen Sep 14, 2023
9af32c8
Fixing tests
kevinmessiaen Sep 14, 2023
c2b0cfd
Merge branch 'main' into poc/talk-to-my-ml
kevinmessiaen Sep 14, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 44 additions & 0 deletions python-client/docs/guides/talk_to_your_model/index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# 🗣️Talk to your model

The **Talk to Your Model** feature allows you to engage in a *conversation* with your model in order to ask it to make
predictions, explain those predictions, and help you understand them.

To talk to your model, you simply need [to wrap it into giskard](../wrap_model/index.md) and call the `talk` method:

```python
import giskard
import os

model, df = giskard.demo.titanic()
giskard_dataset = giskard.Dataset(df=df, target="Survived", name="Titanic dataset")
giskard_model = giskard.Model(model=model, model_type="classification",
name="Titanic model that return yes if survived otherwise no",
classification_labels=['no', 'yes'])

# Set up your OpenAI API key, by default we are using GPT 3.5
os.environ['OPENAI_API_KEY'] = 'sk-...'

# Talk to your model
print(giskard_model.talk('What is this model doing?'))
print(giskard_model.talk('Can you tell me if "Webber, Miss. Susan" survived the titanic crash?', giskard_dataset, True))
print(giskard_model.talk('Can you explain me why you predicted that "Webber, Miss. Susan" survived the titanic crash?',
giskard_dataset, True))
```

## Using a custom language model

By default, GPT 3.5 will be used to generate responses. You can use
any [language model](https://python.langchain.com/docs/modules/model_io/models/) by setting the default llm globally
using `giskard.llm_config.set_default_llm`:

```python
import giskard
from langchain.llms import Cohere

# Create your llm
llm = Cohere()

# Set your llm globally
giskard.llm_config.set_default_llm(llm)
```

2 changes: 1 addition & 1 deletion python-client/docs/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ guides/scan/index
guides/slice/index
guides/transformation/index
guides/test-suite/index
guides/talk_to_your_model/index
guides/cicd/pipeline
guides/installation_app/index
guides/upload/index
Expand Down Expand Up @@ -131,4 +132,3 @@ integrations/index
::::
:::::


2 changes: 2 additions & 0 deletions python-client/giskard/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from giskard.ml_worker.utils.logging import configure_logging
from giskard.models.automodel import Model
from . import demo
from .llm.config import llm_config
from .ml_worker.utils.network import check_latest_giskard_version
from .scanner import scan
from .utils.analytics_collector import analytics
Expand Down Expand Up @@ -54,4 +55,5 @@ def get_version() -> str:
"TestResult",
"GiskardTest",
"demo",
"llm_config",
]
1 change: 1 addition & 0 deletions python-client/giskard/demo/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ def linear_pipeline():
"titanic_df",
"titanic",
"titanic_pipeline",
"titanic_wrapped",
"linear_df",
"linear",
"linear_pipeline",
Expand Down
2 changes: 2 additions & 0 deletions python-client/giskard/demo/titanic_classification.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os

import pandas as pd
from sklearn import model_selection
from sklearn.compose import ColumnTransformer
Expand All @@ -16,6 +17,7 @@ def get_df():
df["Survived"] = df["Survived"].apply(lambda x: _classification_labels[x])
return df


def get_test_df():
df = pd.read_csv(os.path.join(os.path.dirname(__file__), "titanic.csv"))
df.drop(["Ticket", "Cabin"], axis=1, inplace=True)
Expand Down
Empty file.
47 changes: 47 additions & 0 deletions python-client/giskard/llm/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import os
from typing import Tuple


def _get_openai():
if "OPENAI_API_KEY" not in os.environ:
raise EnvironmentError(
"""
You're trying to use giskard LLM features without providing any LLM or OpenAI API key.
To use the default OpenAI API please provide the OPENAI_API_KEY environment variable.
To use a custom model, please setup your llm by calling `giskard.set_default_llm(my_llm)`
"""
)

from langchain.llms import OpenAI

return OpenAI(temperature=0.1)


class _TalkConfig:
reliability_thresholds: Tuple[int, int] = 4, 6


class _LlmConfig:
    """Holds the language model shared by giskard's LLM-powered features."""

    # Cached llm; ``None`` means "fall back to OpenAI on first access".
    _default_llm = None
    # Settings specific to the ``Model.talk`` feature.
    talk: _TalkConfig = _TalkConfig()

    @property
    def default_llm(self):
        """Return the configured llm, lazily creating the OpenAI fallback if unset."""
        if self._default_llm is not None:
            return self._default_llm
        # Deferring _get_openai avoids missing-key or missing-dependency errors
        # for users who never touch the LLM features.
        self._default_llm = _get_openai()
        return self._default_llm

    def set_default_llm(self, default_llm=None):
        """Set the llm used globally; passing ``None`` restores the OpenAI fallback.

        Raises:
            ValueError: if ``default_llm`` is neither ``None`` nor a langchain
                ``BaseLanguageModel`` instance.
        """
        from langchain.base_language import BaseLanguageModel

        is_acceptable = default_llm is None or isinstance(default_llm, BaseLanguageModel)
        if not is_acceptable:
            raise ValueError(
                "Please make sure that the default llm provided is instance of `langchain.base_language.BaseLanguageModel`"
            )

        self._default_llm = default_llm


llm_config = _LlmConfig()
Empty file.
Loading