-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
chore: Update dependencies and add Portkey AI
- Updated tiktoken dependency to version 0.7.0 - Added portkey-ai dependency - Updated pytest workflow to use Poetry for dependency management - Added Azure OpenAI API keys and endpoint to pytest workflow - Added Portkey AI client initialization in examples - Added Portkey AI settings in config - Added Portkey AI client factory method in LLMFactory
- Loading branch information
1 parent
0039cd2
commit 8ca6ed7
Showing
7 changed files
with
283 additions
and
42 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,29 @@ | ||
"""Example: structured extraction through a Portkey-wrapped Anthropic client."""
import uuid

from pydantic import BaseModel

from ava_mosaic_ai.config.settings import LLMProvider
from ava_mosaic_ai.llm_factory import LLMFactory

# Optional Portkey request metadata — tags each request for tracing/analytics.
metadata = {
    "_user": "[email protected]",
    "environment": "development",
    "session_id": str(uuid.uuid4()),
}

# Alternative providers, kept for quick switching while developing:
# client = LLMFactory(LLMProvider.PORTKEY_AZURE_OPENAI, metadata=metadata)
# client = LLMFactory(LLMProvider.AZURE_OPENAI)
client = LLMFactory(LLMProvider.PORTKEY_ANTHROPIC, metadata=metadata)


class User(BaseModel):
    # Target schema the LLM response is parsed into.
    name: str
    age: int


# client = instructor.from_openai(portkey)
user_info = client.create_completion(
    # model="gpt-4o",
    max_tokens=1024,
    response_model=User,
    messages=[{"role": "user", "content": "John Doe is 30 years old."}],
    metadata=metadata,
)

print(user_info.name)
print(user_info.age)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,28 @@ | ||
"""Example: structured completion via ava_mosaic_ai's Azure OpenAI wrapper."""
import warnings

from pydantic import BaseModel, Field

import ava_mosaic_ai

# Escalate UserWarnings to errors so misconfigured usage fails loudly in this demo.
warnings.filterwarnings("error", category=UserWarning)


class CompletionModel(BaseModel):
    # Schema the LLM answer is coerced into.
    response: str = Field(description="Your response to the user.")
    reasoning: str = Field(description="Explain your reasoning for the response.")


messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {
        "role": "user",
        "content": "If it takes 2 hours to dry 1 shirt out in the sun, how long will it take to dry 5 shirts?",
    },
]

llm = ava_mosaic_ai.get_llm(provider="azure_openai")
llm.settings.default_model = "gpt-4o"
completion = llm.create_completion(
    response_model=CompletionModel,
    messages=messages,
)
assert isinstance(completion, CompletionModel)

print(f"Response: {completion.response}\n")
print(f"Reasoning: {completion.reasoning}")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,40 +1,24 @@ | ||
"""Scratch example: Portkey-Anthropic structured extraction, followed by a
standalone OpenAI API-key sanity check."""
import os
import uuid

from dotenv import load_dotenv
from openai import OpenAI
from pydantic import BaseModel

from ava_mosaic_ai.config.settings import LLMProvider
from ava_mosaic_ai.llm_factory import LLMFactory

# FIX: `metadata` was referenced below without ever being defined in this file,
# which raises NameError on import. Define the optional Portkey tracing
# metadata (matching the sibling Portkey example) before building the client.
metadata = {
    "_user": "[email protected]",
    "environment": "development",
    "session_id": str(uuid.uuid4()),
}

# Alternative providers, kept for quick switching while developing:
# client = LLMFactory(LLMProvider.PORTKEY_AZURE_OPENAI)
# client = LLMFactory(LLMProvider.AZURE_OPENAI)
client = LLMFactory(LLMProvider.PORTKEY_ANTHROPIC, metadata=metadata)


class User(BaseModel):
    # Target schema the LLM response is parsed into.
    name: str
    age: int


user_info = client.create_completion(
    # model="gpt-4o",
    max_tokens=1024,
    response_model=User,
    messages=[{"role": "user", "content": "John Doe is 30 years old."}],
)

# --- Standalone OpenAI API-key sanity check (independent of the code above) ---
# load_dotenv()  # presumably loads OPENAI_API_KEY from a .env file — confirm

# NOTE(review): rebinding `client` here shadows the LLMFactory client above;
# safe only because `user_info` has already been created at this point.
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY", "something"))
try:
    response = client.models.list()
    print("API key is valid. Available models:", [model.id for model in response])
except Exception as e:
    # Best-effort diagnostic: report (don't crash on) an invalid/missing key.
    print("API key validation failed:", str(e))

print(user_info.name)
print(user_info.age)
Oops, something went wrong.