
Commit bde8556
Log instead of warn when model isn't specified
cpsievert committed Dec 9, 2024
1 parent 4308214 commit bde8556
Showing 7 changed files with 25 additions and 22 deletions.
chatlas/_anthropic.py (3 additions & 3 deletions)

@@ -20,7 +20,7 @@
 from ._tokens import tokens_log
 from ._tools import Tool, basemodel_to_param_schema
 from ._turn import Turn, normalize_turns
-from ._utils import inform_model_default
+from ._utils import log_model_default

 if TYPE_CHECKING:
     from anthropic.types import (
@@ -164,7 +164,7 @@ def ChatAnthropic(
     """

     if model is None:
-        model = inform_model_default("claude-3-5-sonnet-latest")
+        model = log_model_default("claude-3-5-sonnet-latest")

     return Chat(
         provider=AnthropicProvider(
@@ -583,7 +583,7 @@ def ChatBedrockAnthropic(

     if model is None:
         # Default model from https://github.com/anthropics/anthropic-sdk-python?tab=readme-ov-file#aws-bedrock
-        model = inform_model_default("anthropic.claude-3-5-sonnet-20241022-v2:0")
+        model = log_model_default("anthropic.claude-3-5-sonnet-20241022-v2:0")

     return Chat(
         provider=AnthropicBedrockProvider(
chatlas/_github.py (2 additions & 2 deletions)

@@ -6,7 +6,7 @@
 from ._chat import Chat
 from ._openai import ChatOpenAI
 from ._turn import Turn
-from ._utils import MISSING, MISSING_TYPE, inform_model_default
+from ._utils import MISSING, MISSING_TYPE, log_model_default

 if TYPE_CHECKING:
     from ._openai import ChatCompletion
@@ -131,7 +131,7 @@ def ChatGithub(
     ```
     """
     if model is None:
-        model = inform_model_default("gpt-4o")
+        model = log_model_default("gpt-4o")
     if api_key is None:
         api_key = os.getenv("GITHUB_PAT")

chatlas/_google.py (2 additions & 2 deletions)

@@ -18,7 +18,7 @@
 from ._provider import Provider
 from ._tools import Tool, basemodel_to_param_schema
 from ._turn import Turn, normalize_turns
-from ._utils import inform_model_default
+from ._utils import log_model_default

 if TYPE_CHECKING:
     from google.generativeai.types.content_types import (
@@ -144,7 +144,7 @@ def ChatGoogle(
     """

     if model is None:
-        model = inform_model_default("gemini-1.5-flash")
+        model = log_model_default("gemini-1.5-flash")

     turns = normalize_turns(
         turns or [],
chatlas/_groq.py (2 additions & 2 deletions)

@@ -6,7 +6,7 @@
 from ._chat import Chat
 from ._openai import ChatOpenAI
 from ._turn import Turn
-from ._utils import MISSING, MISSING_TYPE, inform_model_default
+from ._utils import MISSING, MISSING_TYPE, log_model_default

 if TYPE_CHECKING:
     from ._openai import ChatCompletion
@@ -127,7 +127,7 @@ def ChatGroq(
     ```
     """
     if model is None:
-        model = inform_model_default("llama3-8b-8192")
+        model = log_model_default("llama3-8b-8192")
     if api_key is None:
         api_key = os.getenv("GROQ_API_KEY")

chatlas/_openai.py (2 additions & 2 deletions)

@@ -21,7 +21,7 @@
 from ._tokens import tokens_log
 from ._tools import Tool, basemodel_to_param_schema
 from ._turn import Turn, normalize_turns
-from ._utils import MISSING, MISSING_TYPE, inform_model_default, is_testing
+from ._utils import MISSING, MISSING_TYPE, is_testing, log_model_default

 if TYPE_CHECKING:
     from openai.types.chat import (
@@ -164,7 +164,7 @@ def ChatOpenAI(
     seed = 1014 if is_testing() else None

     if model is None:
-        model = inform_model_default("gpt-4o")
+        model = log_model_default("gpt-4o")

     return Chat(
         provider=OpenAIProvider(
chatlas/_perplexity.py (2 additions & 2 deletions)

@@ -6,7 +6,7 @@
 from ._chat import Chat
 from ._openai import ChatOpenAI
 from ._turn import Turn
-from ._utils import MISSING, MISSING_TYPE, inform_model_default
+from ._utils import MISSING, MISSING_TYPE, log_model_default

 if TYPE_CHECKING:
     from ._openai import ChatCompletion
@@ -132,7 +132,7 @@ def ChatPerplexity(
     ```
     """
     if model is None:
-        model = inform_model_default("llama-3.1-sonar-small-128k-online")
+        model = log_model_default("llama-3.1-sonar-small-128k-online")
     if api_key is None:
         api_key = os.getenv("PERPLEXITY_API_KEY")

chatlas/_utils.py (12 additions & 9 deletions)

@@ -2,14 +2,18 @@

 import functools
 import inspect
+import logging
 import os
 import re
-import warnings
 from typing import Awaitable, Callable, TypeVar, cast

 from ._typing_extensions import ParamSpec, TypeGuard

-# Copied from shiny/_utils.py
+logger = logging.getLogger("chatlas")
+
+# --------------------------------------------------------------------
+# wrap_async() and is_async_callable() was copied from shiny/_utils.py
+# --------------------------------------------------------------------

 R = TypeVar("R")  # Return type
 P = ParamSpec("P")
@@ -76,15 +80,14 @@ class MISSING_TYPE:
 MISSING = MISSING_TYPE()


-class DefaultModelWarning(Warning):
-    pass
+def log_model_default(model: str) -> str:
+    logger.info(f"Defaulting to `model = '{model}'`.")
+    return model


-def inform_model_default(model: str, stacklevel: int = 3) -> str:
-    if not is_testing():
-        msg = f"Defaulting to `model = '{model}'`."
-        warnings.warn(msg, DefaultModelWarning, stacklevel=stacklevel)
-    return model
+# --------------------------------------------------------------------
+# html_escape was copied from htmltools/_utils.py
+# --------------------------------------------------------------------


 HTML_ESCAPE_TABLE = {
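Note: with this change the default-model message is emitted through Python's standard logging (the "chatlas" logger, at INFO level) instead of a DefaultModelWarning, so it stays silent unless logging is configured. A minimal sketch of how a caller might surface it, assuming chatlas re-exports ChatOpenAI at the package top level and that an OpenAI API key is available:

import logging

from chatlas import ChatOpenAI  # assumes a top-level re-export of ChatOpenAI

# Attach a root handler at INFO level so records from the "chatlas" logger
# are printed; without one, the "Defaulting to `model = ...`" message is dropped.
logging.basicConfig(level=logging.INFO)

# No `model` argument: ChatOpenAI() falls back to log_model_default("gpt-4o"),
# which logs the chosen default and returns it. Assumes OPENAI_API_KEY is set.
chat = ChatOpenAI()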
