Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: add python sdk in monorepo #706

Merged
merged 1 commit into from
Jan 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions packages/python-sdk/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
dist
.env
build
.DS_Store
.vscode/settings.json
__pycache__/
lunary/test.py
lunary/demo.py
data
26 changes: 26 additions & 0 deletions packages/python-sdk/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
<div align="center">

<img src="https://lunary.ai/logo.png" style='border-radius: 12px;' width="50"/>
<h1>Lunary Python SDK</h1>

**📈 Python monitoring for AI apps and agents**

[website](https://lunary.ai) - [docs](https://lunary.ai/docs/py/) - ![PyPI - Version](https://img.shields.io/pypi/v/lunary)

---

</div>

Use it with any LLM model and custom agents (not limited to OpenAI).

To get started, get a project ID by registering [here](https://lunary.ai).

## 🛠️ Installation

```bash
pip install lunary
```

## 📖 Documentation

Full docs are available [here](https://lunary.ai/docs/py).
27 changes: 27 additions & 0 deletions packages/python-sdk/examples/anthropic/async-streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import os
import asyncio
from anthropic import AsyncAnthropic
import lunary

client = AsyncAnthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),  # This is the default and can be omitted
)
# Instrument the client so calls are reported to lunary.
lunary.monitor(client)


async def main() -> None:
    """Stream a Claude response and consume the events asynchronously."""
    # Bug fix: the original awaited a non-streaming create() and then
    # iterated the Message object with a synchronous `for`. Request a
    # stream explicitly and consume it with `async for`.
    stream = await client.messages.create(
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": "Hello, Claude",
            }
        ],
        model="claude-3-opus-20240229",
        stream=True,
    )
    async for event in stream:
        pass  # Consuming the stream is enough for monitoring to record it.


asyncio.run(main())
26 changes: 26 additions & 0 deletions packages/python-sdk/examples/anthropic/async.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import os
import asyncio
from anthropic import AsyncAnthropic
import lunary

client = AsyncAnthropic(
    # Reading the key explicitly; the SDK falls back to this env var anyway.
    api_key=os.environ.get("ANTHROPIC_API_KEY"),
)
# Instrument the client so calls are reported to lunary.
lunary.monitor(client)


async def main() -> None:
    """Send a single prompt to Claude and print the reply content."""
    prompt = {"role": "user", "content": "Hello, Claude"}
    message = await client.messages.create(
        model="claude-3-opus-20240229",
        max_tokens=1024,
        messages=[prompt],
    )
    print(message.content)


asyncio.run(main())
22 changes: 22 additions & 0 deletions packages/python-sdk/examples/anthropic/basic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import os
from anthropic import Anthropic
import lunary

client = Anthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),
)
# Instrument the client so calls are reported to lunary.
lunary.monitor(client)


message = client.messages.create(
    max_tokens=1024,
    messages=[
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ],
    model="claude-3-opus-20240229",
)

# Bug fix: the original line was truncated mid-expression (`print(message.ro`),
# a syntax error. Print the response content like the other examples do.
print(message.content)
26 changes: 26 additions & 0 deletions packages/python-sdk/examples/anthropic/streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import os
from anthropic import Anthropic
import lunary

client = Anthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),
)
# Instrument the client so calls are reported to lunary.
lunary.monitor(client)


# Request a streamed response; the monitored client records the events
# as the stream is consumed.
request = {
    "max_tokens": 1024,
    "messages": [{"role": "user", "content": "Hello, Claude"}],
    "model": "claude-3-opus-20240229",
    "stream": True,
}

for event in client.messages.create(**request):
    pass  # Draining the stream is enough for monitoring purposes.


86 changes: 86 additions & 0 deletions packages/python-sdk/examples/anthropic/tool-call.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
from anthropic import Anthropic
import lunary

# API key is taken from the ANTHROPIC_API_KEY environment variable.
client = Anthropic()
# Instrument the client so calls are reported to lunary.
lunary.monitor(client)

# First round-trip: ask a question while advertising a `get_weather` tool,
# so the model can respond with a tool_use request.
response = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    tools=[
        {
            "name": "get_weather",
            "description": "Get the current weather in a given location",
            "input_schema": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    }
                },
                "required": ["location"],
            },
        }
    ],
    messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}],
)

# Second round-trip: replay a conversation that already contains an assistant
# tool_use turn and the matching user tool_result, so the model can produce a
# final answer. NOTE(review): the tool_use id and "65 degrees" result below are
# hard-coded sample values, not taken from the first response above.
response = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    tools=[
        {
            "name": "get_weather",
            "description": "Get the current weather in a given location",
            "input_schema": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA"
                    },
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": "The unit of temperature, either 'celsius' or 'fahrenheit'"
                    }
                },
                "required": ["location"]
            }
        }
    ],
    messages=[
        {
            "role": "user",
            "content": "What's the weather like in San Francisco?"
        },
        {
            "role": "assistant",
            "content": [
                {
                    "type": "text",
                    "text": "<thinking>I need to use get_weather, and the user wants SF, which is likely San Francisco, CA.</thinking>"
                },
                {
                    "type": "tool_use",
                    "id": "toolu_01A09q90qw90lq917835lq9",
                    "name": "get_weather",
                    "input": {"location": "San Francisco, CA", "unit": "celsius"}
                }
            ]
        },
        {
            "role": "user",
            "content": [
                {
                    "type": "tool_result",
                    "tool_use_id": "toolu_01A09q90qw90lq917835lq9", # from the API response
                    "content": "65 degrees" # from running your tool
                }
            ]
        }
    ]
)

print(response)
36 changes: 36 additions & 0 deletions packages/python-sdk/examples/azure-openai/async-stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import os, asyncio
from openai import AsyncAzureOpenAI
import lunary

DEPLOYMENT_ID = os.environ.get("AZURE_OPENAI_DEPLOYMENT_ID")
RESOURCE_NAME = os.environ.get("AZURE_OPENAI_RESOURCE_NAME")
API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")


# Bug fix: the Azure endpoint host is the *resource* name and the `model`
# argument is the *deployment* id — the original had the two swapped.
client = AsyncAzureOpenAI(
    api_version="2023-07-01-preview",
    api_key=API_KEY,
    azure_endpoint=f"https://{RESOURCE_NAME}.openai.azure.com",
)

# Instrument the client so calls are reported to lunary.
lunary.monitor(client)


async def main() -> None:
    """Stream a chat completion and print the deltas as they arrive."""
    stream = await client.chat.completions.create(
        model=DEPLOYMENT_ID,
        stream=True,
        messages=[
            {
                "role": "user",
                "content": "Say this is an async stream test",
            }
        ],
    )
    async for chunk in stream:
        # Azure can emit chunks with no choices (e.g. content-filter frames).
        if not chunk.choices:
            continue
        print(chunk.choices[0].delta.content, end="")


asyncio.run(main())
32 changes: 32 additions & 0 deletions packages/python-sdk/examples/azure-openai/async.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import os, asyncio
from openai import AsyncAzureOpenAI
import lunary

DEPLOYMENT_ID = os.environ.get("AZURE_OPENAI_DEPLOYMENT_ID")
RESOURCE_NAME = os.environ.get("AZURE_OPENAI_RESOURCE_NAME")
API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")


# Bug fix: the Azure endpoint host is the *resource* name and the `model`
# argument is the *deployment* id — the original had the two swapped.
client = AsyncAzureOpenAI(
    api_version="2023-07-01-preview",
    api_key=API_KEY,
    azure_endpoint=f"https://{RESOURCE_NAME}.openai.azure.com",
)

# Instrument the client so calls are reported to lunary.
lunary.monitor(client)


async def main() -> None:
    """Send a single prompt and print the completion as JSON."""
    completion = await client.chat.completions.create(
        model=DEPLOYMENT_ID,
        messages=[
            {
                "role": "user",
                "content": "Say this is an Async test",
            }
        ],
    )
    print(completion.to_json())


asyncio.run(main())
27 changes: 27 additions & 0 deletions packages/python-sdk/examples/azure-openai/basic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import os
from openai import AzureOpenAI
import lunary

# Connection settings come entirely from the environment.
API_VERSION = os.environ.get("OPENAI_API_VERSION")
API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
AZURE_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
RESOURCE_NAME = os.environ.get("AZURE_OPENAI_RESOURCE_NAME")


client = AzureOpenAI(
    api_version=API_VERSION,
    azure_endpoint=AZURE_ENDPOINT,
    api_key=API_KEY
)
# Instrument the client so calls are reported to lunary.
lunary.monitor(client)

# NOTE(review): for Azure OpenAI the `model` argument should be the
# *deployment* name; here it is fed from AZURE_OPENAI_RESOURCE_NAME —
# confirm that env var actually holds the deployment name.
completion = client.chat.completions.create(
    model=RESOURCE_NAME,
    messages=[
        {
            "role": "user",
            "content": "How do I output all files in a directory using Python?",
        },
    ],
)
print(completion.to_json())
31 changes: 31 additions & 0 deletions packages/python-sdk/examples/azure-openai/stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import os
from openai import AzureOpenAI
import lunary

DEPLOYMENT_ID = os.environ.get("AZURE_OPENAI_DEPLOYMENT_ID")
RESOURCE_NAME = os.environ.get("AZURE_OPENAI_RESOURCE_NAME")
API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")


# Bug fix: the Azure endpoint host is the *resource* name and the `model`
# argument is the *deployment* id — the original had the two swapped.
client = AzureOpenAI(
    api_version="2023-07-01-preview",
    api_key=API_KEY,
    azure_endpoint=f"https://{RESOURCE_NAME}.openai.azure.com",
)

# Instrument the client so calls are reported to lunary.
lunary.monitor(client)

stream = client.chat.completions.create(
    model=DEPLOYMENT_ID,
    stream=True,
    messages=[
        {
            "role": "user",
            "content": "Say sync stream",
        },
    ],
)
for chunk in stream:
    # Azure can emit chunks with no choices (e.g. content-filter frames).
    if not chunk.choices:
        continue
    print(chunk.choices[0].delta.content, end="")
Loading
Loading