Skip to content

Commit

Permalink
Merge branch 'main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
henchaves authored Oct 18, 2024
2 parents fe0e2b0 + 750db5c commit a44bca4
Show file tree
Hide file tree
Showing 8 changed files with 235 additions and 134 deletions.
2 changes: 1 addition & 1 deletion .readthedocs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ build:
jobs:
post_create_environment:
- pip install pdm
- pdm export --without-hashes -G doc,hub -o requirements-sphinx.txt
- pdm export --without-hashes -G doc,llm -o requirements-sphinx.txt

sphinx:
configuration: docs/conf.py
Expand Down
80 changes: 60 additions & 20 deletions docs/open_source/scan/scan_llm/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -55,15 +55,19 @@ like this:
::::::{tab-item} OpenAI

```python
import giskard
import os
import giskard
from giskard.llm.client.openai import OpenAIClient

# Set the OpenAI API key
os.environ["OPENAI_API_KEY"] = "sk-…"

# Create a giskard OpenAI client
openai_client = OpenAIClient(model="gpt-4o")

# Set the default client
giskard.llm.set_llm_api("openai")
oc = OpenAIClient(model="gpt-4-turbo-preview")
giskard.llm.set_default_client(oc)
giskard.llm.set_default_client(openai_client)
```

::::::
Expand All @@ -73,30 +77,38 @@ Require `openai>=1.0.0`

```python
import os
from giskard.llm import set_llm_model
from giskard.llm.embeddings.openai import set_embedding_model
import giskard

# Set the Azure OpenAI API key and endpoint
os.environ['AZURE_OPENAI_API_KEY'] = '...'
os.environ['AZURE_OPENAI_ENDPOINT'] = 'https://xxx.openai.azure.com'
os.environ['OPENAI_API_VERSION'] = '2023-07-01-preview'

# You'll need to provide the name of the model that you've deployed
# Beware, the model provided must be capable of using function calls
set_llm_model('my-gpt-4-model')
set_embedding_model('my-embedding-model') # Optional
giskard.llm.set_llm_model('my-gpt-4-model')
giskard.llm.embeddings.set_embedding_model('my-embedding-model')
```

::::::
::::::{tab-item} Mistral

```python
import os
import giskard
from giskard.llm.client.mistral import MistralClient

os.environ["MISTRAL_API_KEY"] = "sk-…"
# Set the Mistral API key
os.environ["MISTRAL_API_KEY"] = ""

mc = MistralClient()
giskard.llm.set_default_client(mc)
# Create a giskard Mistral client
mistral_client = MistralClient()

# Set the default client
giskard.llm.set_default_client(mistral_client)

# You may also want to set the default embedding model
# Check the Custom Client code snippet for more details
```

::::::
Expand All @@ -106,11 +118,18 @@ giskard.llm.set_default_client(mc)
import giskard
from openai import OpenAI
from giskard.llm.client.openai import OpenAIClient
from giskard.llm.embeddings.openai import OpenAIEmbedding

# Setup the Ollama client with API key and base URL
# Set up the OpenAI client with API key and base URL for Ollama
_client = OpenAI(base_url="http://localhost:11434/v1/", api_key="ollama")
oc = OpenAIClient(model="gemma:2b", client=_client)
giskard.llm.set_default_client(oc)

# Wrap the original OpenAI client with giskard OpenAI client and embedding
llm_client = OpenAIClient(model="llama3.2", client=_client)
embed_client = OpenAIEmbedding(model="nomic-embed-text", client=_client)

# Set the default client and embedding
giskard.llm.set_default_client(llm_client)
giskard.llm.embeddings.set_default_embedding(embed_client)
```

::::::
Expand All @@ -123,13 +142,17 @@ import giskard

from giskard.llm.client.bedrock import ClaudeBedrockClient
from giskard.llm.embeddings.bedrock import BedrockEmbedding
from giskard.llm.embeddings import set_default_embedding

# Create a Bedrock client
bedrock_runtime = boto3.client("bedrock-runtime", region_name=os.environ["AWS_DEFAULT_REGION"])

# Wrap the Bedrock client with giskard Bedrock client and embedding
claude_client = ClaudeBedrockClient(bedrock_runtime, model="anthropic.claude-3-haiku-20240307-v1:0")
embed_client = BedrockEmbedding(bedrock_runtime, model="amazon.titan-embed-text-v1")

# Set the default client and embedding
giskard.llm.set_default_client(claude_client)
set_default_embedding(embed_client)
giskard.llm.embeddings.set_default_embedding(embed_client)
```

::::::
Expand All @@ -138,14 +161,23 @@ set_default_embedding(embed_client)
```python
import os
import giskard

import google.generativeai as genai

from giskard.llm.client.gemini import GeminiClient

# Set the Gemini API key
os.environ["GEMINI_API_KEY"] = ""

# Configure the Gemini API
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

giskard.llm.set_default_client(GeminiClient())
# Create a giskard Gemini client
gemini_client = GeminiClient()

# Set the default client
giskard.llm.set_default_client(gemini_client)

# You may also want to set the default embedding model
# Check the Custom Client code snippet for more details
```

::::::
Expand All @@ -157,7 +189,7 @@ from typing import Sequence, Optional
from giskard.llm.client import set_default_client
from giskard.llm.client.base import LLMClient, ChatMessage


# Create a custom client by extending the LLMClient class
class MyLLMClient(LLMClient):
def __init__(self, my_client):
self._client = my_client
Expand Down Expand Up @@ -202,9 +234,17 @@ class MyLLMClient(LLMClient):

return ChatMessage(role="assistant", message=data["completion"])

# Create an instance of the custom client
llm_client = MyLLMClient()

set_default_client(MyLLMClient())
# Set the default client
set_default_client(llm_client)

# It's also possible to create a custom embedding class extending BaseEmbedding
# Or you can use FastEmbed for a pre-built embedding model:
from giskard.llm.embeddings.fastembed import try_get_fastembed_embeddings
embed_client = try_get_fastembed_embeddings()
giskard.llm.embeddings.set_default_embedding(embed_client)
```

::::::
Expand Down
85 changes: 63 additions & 22 deletions docs/open_source/setting_up/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,60 +11,78 @@ This guide focuses primarily on configuring and using various LLM clients suppor
## OpenAI GPT-4 Client Setup

```python
import giskard
import os
import giskard
from giskard.llm.client.openai import OpenAIClient

# Set the OpenAI API key
os.environ["OPENAI_API_KEY"] = "sk-…"

# Create a giskard OpenAI client
openai_client = OpenAIClient(model="gpt-4o")

# Set the default client
giskard.llm.set_llm_api("openai")
oc = OpenAIClient(model="gpt-4-turbo-preview")
giskard.llm.set_default_client(oc)
giskard.llm.set_default_client(openai_client)
```

## Azure OpenAI Client Setup

```python
import os
from giskard.llm import set_llm_model
from giskard.llm.embeddings.openai import set_embedding_model
import giskard

# Set the Azure OpenAI API key and endpoint
os.environ['AZURE_OPENAI_API_KEY'] = '...'
os.environ['AZURE_OPENAI_ENDPOINT'] = 'https://xxx.openai.azure.com'
os.environ['OPENAI_API_VERSION'] = '2023-07-01-preview'


# You'll need to provide the name of the model that you've deployed
# Beware, the model provided must be capable of using function calls
set_llm_model('my-gpt-4-model')
set_embedding_model('my-embedding-model') # Optional
giskard.llm.set_llm_model('my-gpt-4-model')
giskard.llm.embeddings.set_embedding_model('my-embedding-model')
```

## Mistral Client Setup

```python
import os
import giskard
from giskard.llm.client.mistral import MistralClient

os.environ["MISTRAL_API_KEY"] = "sk-…"
# Set the Mistral API key
os.environ["MISTRAL_API_KEY"] = ""

mc = MistralClient()
giskard.llm.set_default_client(mc)
# Create a giskard Mistral client
mistral_client = MistralClient()

# Set the default client
giskard.llm.set_default_client(mistral_client)

# You may also want to set the default embedding model
# Check the Custom Client Setup section for more details
```

## Ollama Client Setup

The Ollama setup involves configuring an OpenAI client customized for the Ollama API:

```python
import giskard
from openai import OpenAI
from giskard.llm.client.openai import OpenAIClient
from giskard.llm.client.mistral import MistralClient
from giskard.llm.embeddings.openai import OpenAIEmbedding

# Setup the Ollama client with API key and base URL
# Set up the OpenAI client with API key and base URL for Ollama
_client = OpenAI(base_url="http://localhost:11434/v1/", api_key="ollama")
oc = OpenAIClient(model="gemma:2b", client=_client)
giskard.llm.set_default_client(oc)

# Wrap the original OpenAI client with giskard OpenAI client and embedding
llm_client = OpenAIClient(model="llama3.2", client=_client)
embed_client = OpenAIEmbedding(model="nomic-embed-text", client=_client)

# Set the default client and embedding
giskard.llm.set_default_client(llm_client)
giskard.llm.embeddings.set_default_embedding(embed_client)
```

## Claude 3 Client Setup
Expand All @@ -78,28 +96,41 @@ import giskard

from giskard.llm.client.bedrock import ClaudeBedrockClient
from giskard.llm.embeddings.bedrock import BedrockEmbedding
from giskard.llm.embeddings import set_default_embedding

# Create a Bedrock client
bedrock_runtime = boto3.client("bedrock-runtime", region_name=os.environ["AWS_DEFAULT_REGION"])

# Wrap the Bedrock client with giskard Bedrock client and embedding
claude_client = ClaudeBedrockClient(bedrock_runtime, model="anthropic.claude-3-haiku-20240307-v1:0")
embed_client = BedrockEmbedding(bedrock_runtime, model="amazon.titan-embed-text-v1")

# Set the default client and embedding
giskard.llm.set_default_client(claude_client)
set_default_embedding(embed_client)
giskard.llm.embeddings.set_default_embedding(embed_client)
```

## Gemini Client Setup

```python
import os
import giskard

import google.generativeai as genai

from giskard.llm.client.gemini import GeminiClient

# Set the Gemini API key
os.environ["GEMINI_API_KEY"] = ""

# Configure the Gemini API
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

giskard.llm.set_default_client(GeminiClient())
# Create a giskard Gemini client
gemini_client = GeminiClient()

# Set the default client
giskard.llm.set_default_client(gemini_client)

# You may also want to set the default embedding model
# Check the Custom Client Setup section for more details
```

## Custom Client Setup
Expand All @@ -110,7 +141,7 @@ from typing import Sequence, Optional
from giskard.llm.client import set_default_client
from giskard.llm.client.base import LLMClient, ChatMessage


# Create a custom client by extending the LLMClient class
class MyLLMClient(LLMClient):
def __init__(self, my_client):
self._client = my_client
Expand Down Expand Up @@ -155,7 +186,17 @@ class MyLLMClient(LLMClient):

return ChatMessage(role="assistant", message=data["completion"])

set_default_client(MyLLMClient())
# Create an instance of the custom client
llm_client = MyLLMClient()

# Set the default client
set_default_client(llm_client)

# It's also possible to create a custom embedding class extending BaseEmbedding
# Or you can use FastEmbed for a pre-built embedding model:
from giskard.llm.embeddings.fastembed import try_get_fastembed_embeddings
embed_client = try_get_fastembed_embeddings()
giskard.llm.embeddings.set_default_embedding(embed_client)
```

If you run into any issues configuring the LLM client, don't hesitate to [ask us on Discord](https://discord.com/invite/ABvfpbu69R) or open a new issue on [our GitHub repo](https://github.com/Giskard-AI/giskard).
Loading

0 comments on commit a44bca4

Please sign in to comment.