Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for ollama models #86

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,8 @@ ANTHROPIC_API_KEY=your-anthropic-api-key

# For getting financial data to power the hedge fund
# Get your Financial Datasets API key from https://financialdatasets.ai/
FINANCIAL_DATASETS_API_KEY=your-financial-datasets-api-key
FINANCIAL_DATASETS_API_KEY=your-financial-datasets-api-key

# For running LLMs served by your private Ollama instance
# Install the Ollama service from https://ollama.com/
OLLAMA_HOST='http://example.com:11434'
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,9 @@ GROQ_API_KEY=your-groq-api-key
# For getting financial data to power the hedge fund
# Get your Financial Datasets API key from https://financialdatasets.ai/
FINANCIAL_DATASETS_API_KEY=your-financial-datasets-api-key

# For running LLMs served by the Ollama API
OLLAMA_HOST='http://localhost:11434'
```

**Important**: You must set `OPENAI_API_KEY`, `GROQ_API_KEY`, or `ANTHROPIC_API_KEY` for the hedge fund to work. If you want to use LLMs from all providers, you will need to set all API keys.
Expand Down
34 changes: 32 additions & 2 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ tabulate = "^0.9.0"
colorama = "^0.4.6"
questionary = "^2.1.0"
rich = "^13.9.4"
langchain-ollama = "^0.2.3"

[tool.poetry.group.dev.dependencies]
pytest = "^7.4.0"
Expand Down
20 changes: 17 additions & 3 deletions src/llm/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from langchain_anthropic import ChatAnthropic
from langchain_groq import ChatGroq
from langchain_openai import ChatOpenAI
from langchain_ollama import ChatOllama
from enum import Enum
from pydantic import BaseModel
from typing import Tuple
Expand All @@ -12,6 +13,7 @@ class ModelProvider(str, Enum):
OPENAI = "OpenAI"
GROQ = "Groq"
ANTHROPIC = "Anthropic"
OLLAMA = "Ollama"


class LLMModel(BaseModel):
Expand All @@ -23,7 +25,7 @@ class LLMModel(BaseModel):
def to_choice_tuple(self) -> Tuple[str, str, str]:
"""Convert to format needed for questionary choices"""
return (self.display_name, self.model_name, self.provider.value)

def is_deepseek(self) -> bool:
    """Return True when this model belongs to the DeepSeek family.

    Membership is determined purely by the model-name prefix.
    """
    prefix = "deepseek"
    return self.model_name[:len(prefix)] == prefix
Expand Down Expand Up @@ -56,6 +58,12 @@ def is_deepseek(self) -> bool:
model_name="llama-3.3-70b-versatile",
provider=ModelProvider.GROQ
),
# Ollama models: Take 'deepseek-r1:14b' for example, add your custom models here.
LLMModel(
display_name="[ollama] deepseek-r1:14b",
model_name="deepseek-r1:14b",
provider=ModelProvider.OLLAMA
),
LLMModel(
display_name="[openai] gpt-4o",
model_name="gpt-4o",
Expand Down Expand Up @@ -85,7 +93,7 @@ def get_model_info(model_name: str) -> LLMModel | None:
"""Get model information by model_name"""
return next((model for model in AVAILABLE_MODELS if model.model_name == model_name), None)

def get_model(model_name: str, model_provider: ModelProvider) -> ChatOpenAI | ChatGroq | None:
def get_model(model_name: str, model_provider: ModelProvider) -> ChatOpenAI | ChatGroq | ChatAnthropic | ChatOllama | None:
if model_provider == ModelProvider.GROQ:
api_key = os.getenv("GROQ_API_KEY")
if not api_key:
Expand All @@ -106,4 +114,10 @@ def get_model(model_name: str, model_provider: ModelProvider) -> ChatOpenAI | Ch
if not api_key:
print(f"API Key Error: Please make sure ANTHROPIC_API_KEY is set in your .env file.")
raise ValueError("Anthropic API key not found. Please make sure ANTHROPIC_API_KEY is set in your .env file.")
return ChatAnthropic(model=model_name, api_key=api_key)
return ChatAnthropic(model=model_name, api_key=api_key)
elif model_provider == ModelProvider.OLLAMA:
ollama_host = os.getenv('OLLAMA_HOST')
if not ollama_host:
print(f"Ollama Key Error: Please make sure OLLAMA_HOST is set in your .env file.")
raise ValueError("Ollama key not found. Please make sure OLLAMA_HOST is set in your .env file.")
return ChatOllama(model=model_name, base_url=ollama_host)