Skip to content

Commit

Permalink
chore: Update dependencies and add Portkey AI
Browse files Browse the repository at this point in the history
- Updated tiktoken dependency to version 0.7.0
- Added portkey-ai dependency
- Updated pytest workflow to use Poetry for dependency management
- Added Azure OpenAI API keys and endpoint to pytest workflow
- Added Portkey AI client initialization in examples
- Added Portkey AI settings in config
- Added Portkey AI client factory method in LLMFactory
  • Loading branch information
pyrotank41 committed Sep 28, 2024
1 parent 0039cd2 commit 8ca6ed7
Show file tree
Hide file tree
Showing 7 changed files with 283 additions and 42 deletions.
24 changes: 24 additions & 0 deletions ava_mosaic_ai/config/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@ class LLMProvider(str, Enum):
ANTHROPIC = "anthropic"
LLAMA = "llama"
AZURE_OPENAI = "azure_openai"

# llms that are used with portkey
PORTKEY_AZURE_OPENAI = "portkey_azure"
PORTKEY_ANTHROPIC = "portkey_anthropic"


class LLMProviderSettings(BaseModel):
Expand Down Expand Up @@ -43,6 +47,17 @@ class AzureOpenAISettings(LLMProviderSettings):
api_version: str = Field(default="2024-02-15-preview")
default_model: str = Field(default="gpt-4o")
azure_endpoint: str

class PortkeyAzureOpenAISettings(LLMProviderSettings):
    """Settings for Azure OpenAI reached through the Portkey gateway."""

    # Portkey account API key (read from PORTKEY_API_KEY; sent in the
    # Portkey auth header by createHeaders in LLMFactory).
    api_key: str
    # Portkey "virtual key" that the gateway resolves to the upstream
    # Azure OpenAI deployment (read from PORTKEY_VIRTUAL_API_KEY).
    virtual_api_key: str
    default_model: str = Field(default="gpt-4o")

class PortkeyAnthropicSettings(LLMProviderSettings):
    """Settings for Anthropic reached through the Portkey gateway."""

    # Portkey account API key (read from PORTKEY_API_KEY).
    api_key: str
    # Portkey "virtual key" that the gateway resolves to the upstream
    # Anthropic account (read from PORTKEY_VIRTUAL_API_KEY).
    virtual_api_key: str
    default_model: str = Field(default="claude-3-5-sonnet-20240620")
    # Anthropic requires max_tokens on every request, hence a model default here.
    max_tokens: int = Field(default=1024)


class Settings(BaseModel):
Expand Down Expand Up @@ -83,6 +98,15 @@ def get_provider_settings(self, provider: LLMProvider) -> Any:
azure_endpoint=endpoint,
default_model=deployment_name,
)
elif provider == LLMProvider.PORTKEY_AZURE_OPENAI:
api_key = os.environ.get("PORTKEY_API_KEY")
virtual_api_key = os.environ.get("PORTKEY_VIRTUAL_API_KEY")
self._providers[provider] = PortkeyAzureOpenAISettings(api_key=api_key, virtual_api_key=virtual_api_key)

elif provider == LLMProvider.PORTKEY_ANTHROPIC:
api_key = os.environ.get("PORTKEY_API_KEY")
virtual_api_key = os.environ.get("PORTKEY_VIRTUAL_API_KEY")
self._providers[provider] = PortkeyAnthropicSettings(api_key=api_key, virtual_api_key=virtual_api_key)
return self._providers[provider]


Expand Down
63 changes: 58 additions & 5 deletions ava_mosaic_ai/llm_factory.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,45 @@
from typing import Any, Dict, List, Type, Union
from typing import Any, Dict, List, Optional, Type, TypeVar, Union
import instructor
from anthropic import Anthropic
from openai import OpenAI, AzureOpenAI
from pydantic import BaseModel, Field
from ava_mosaic_ai.utils.utils import get_llm_provider
from ava_mosaic_ai.config.settings import LLMProvider, get_settings
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders
from instructor import Instructor
from anthropic import Anthropic


def get_portkey_azure_openai_client(portkey_api_key: str, virtual_api_key: str) -> Instructor:
    """Build an instructor-patched OpenAI client routed through the Portkey gateway.

    Args:
        portkey_api_key: Portkey account API key, sent in the gateway auth header.
        virtual_api_key: Portkey virtual key identifying the upstream deployment.

    Returns:
        An ``instructor``-wrapped client that supports ``response_model`` completions.
    """
    # NOTE(review): the previous version ignored both parameters and embedded
    # hard-coded credentials in source; those keys should be rotated.
    portkey = OpenAI(
        # The gateway authenticates via the Portkey headers below; the OpenAI
        # SDK still requires a non-empty api_key, so the virtual key is reused
        # here (mirrors the PORTKEY_AZURE_OPENAI initializer in LLMFactory).
        api_key=virtual_api_key,
        base_url=PORTKEY_GATEWAY_URL,
        default_headers=createHeaders(
            virtual_key=virtual_api_key,
            api_key=portkey_api_key,
        ),
    )
    return instructor.from_openai(portkey)


class LLMFactory:
def __init__(self, provider: Union[LLMProvider, str]) -> None:
def __init__(
self,
provider: Union[LLMProvider, str],
metadata: Optional[dict] = None
) -> None:
if isinstance(provider, str):
provider = get_llm_provider(provider)

self.metadata = metadata
print(f"metadata: {metadata}")
self.provider = provider
self.settings = get_settings().get_provider_settings(provider)
self._api_key = self.settings.api_key
self.client = self._initialize_client()

def _initialize_client(self) -> Any:
def _initialize_client(self) -> Instructor:
client_initializers = {
LLMProvider.OPENAI: lambda: instructor.from_openai(
OpenAI(api_key=self._api_key)
Expand All @@ -36,6 +58,31 @@ def _initialize_client(self) -> Any:
api_version=self.settings.api_version,
)
),
LLMProvider.PORTKEY_AZURE_OPENAI: lambda: instructor.from_openai(
OpenAI(
api_key=self.settings.virtual_api_key,
base_url=PORTKEY_GATEWAY_URL,
default_headers=createHeaders(
provider="openai",
virtual_key=self.settings.virtual_api_key,
api_key=self._api_key,
metadata=self.metadata,
),
)
),
LLMProvider.PORTKEY_ANTHROPIC: lambda: instructor.from_anthropic(
Anthropic(
api_key=self.settings.virtual_api_key,
base_url=PORTKEY_GATEWAY_URL,
default_headers=createHeaders(
provider="anthropic",
virtual_key=self.settings.virtual_api_key,
api_key=self._api_key,
metadata=self.metadata,
),
)
# .with_options(self.metadata)
),
}

initializer = client_initializers.get(self.provider)
Expand All @@ -53,9 +100,11 @@ def api_key(self, value):
self._api_key = value
self.client = self._initialize_client()

T = TypeVar("T", bound=BaseModel)

def create_completion(
self, response_model: Type[BaseModel], messages: List[Dict[str, str]], **kwargs
) -> Any:
self, response_model: Type[T], messages: List[Dict[str, str]], **kwargs
) -> T:
completion_params = {
"model": kwargs.get("model", self.settings.default_model),
"temperature": kwargs.get("temperature", self.settings.temperature),
Expand All @@ -64,4 +113,8 @@ def create_completion(
"response_model": response_model,
"messages": messages,
}
# metadata = kwargs.get("metadata", None)
# client_with_metadata = self.client.with_options(metadata={"_user": "user_12345", "custom_field": "custom_value"})
# if metadata is not None:
# return client_with_metadata.chat.completions.create(**completion_params)
return self.client.chat.completions.create(**completion_params)
29 changes: 29 additions & 0 deletions examples/portkey.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import uuid

from pydantic import BaseModel

from ava_mosaic_ai.config.settings import LLMProvider
from ava_mosaic_ai.llm_factory import LLMFactory


class User(BaseModel):
    """Structured output extracted from the model's reply."""

    name: str
    age: int


# Optional Portkey request metadata, attached to every gateway call for
# tracing/attribution. (Swap the provider below for PORTKEY_AZURE_OPENAI
# or AZURE_OPENAI to exercise the other backends.)
metadata = {
    "_user": "[email protected]",
    "environment": "development",
    "session_id": str(uuid.uuid4()),
}

client = LLMFactory(LLMProvider.PORTKEY_ANTHROPIC, metadata=metadata)

user_info = client.create_completion(
    max_tokens=1024,
    response_model=User,
    messages=[{"role": "user", "content": "John Doe is 30 years old."}],
    metadata=metadata,
)

print(user_info.name)
print(user_info.age)
28 changes: 28 additions & 0 deletions examples/simple.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import warnings

from pydantic import Field, BaseModel

import ava_mosaic_ai

# Surface any UserWarning as a hard error so the example fails loudly.
warnings.filterwarnings('error', category=UserWarning)


class CompletionModel(BaseModel):
    """Structured answer the LLM must produce."""

    response: str = Field(description="Your response to the user.")
    reasoning: str = Field(description="Explain your reasoning for the response.")


messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {
        "role": "user",
        "content": "If it takes 2 hours to dry 1 shirt out in the sun, how long will it take to dry 5 shirts?",
    },
]

llm = ava_mosaic_ai.get_llm(provider = "azure_openai")
llm.settings.default_model = "gpt-4o"

completion = llm.create_completion(
    response_model=CompletionModel,
    messages=messages,
)
assert isinstance(completion, CompletionModel)

print(f"Response: {completion.response}\n")
print(f"Reasoning: {completion.reasoning}")
54 changes: 19 additions & 35 deletions playground.py
Original file line number Diff line number Diff line change
@@ -1,40 +1,24 @@
import uuid

from pydantic import BaseModel

from ava_mosaic_ai.config.settings import LLMProvider
from ava_mosaic_ai.llm_factory import LLMFactory

# BUG FIX: `metadata` was passed to LLMFactory below but never defined,
# raising NameError at import time. Defined here to match examples/portkey.py.
metadata = {
    "_user": "[email protected]",
    "environment": "development",
    "session_id": str(uuid.uuid4()),
}

client = LLMFactory(LLMProvider.PORTKEY_ANTHROPIC, metadata=metadata)


class User(BaseModel):
    """Structured output extracted from the model's reply."""

    name: str
    age: int


user_info = client.create_completion(
    max_tokens=1024,
    response_model=User,
    messages=[{"role": "user", "content": "John Doe is 30 years old."}],
)

print(user_info.name)
print(user_info.age)
Loading

0 comments on commit 8ca6ed7

Please sign in to comment.