prompty: docstring (langchain-ai#23152)
Added missing docstrings. Formatted docstrings to the consistent format
used in the API Reference.

---------

Co-authored-by: ccurme <[email protected]>
leo-gan and ccurme authored Jun 19, 2024
1 parent 9b82707 commit 50484be
Showing 6 changed files with 256 additions and 87 deletions.
28 changes: 28 additions & 0 deletions libs/partners/prompty/langchain_prompty/core.py
@@ -14,17 +14,23 @@


class SimpleModel(BaseModel, Generic[T]):
"""Simple model for a single item."""

item: T


class PropertySettings(BaseModel):
"""Property settings for a prompty model."""

model_config = ConfigDict(arbitrary_types_allowed=True)
type: Literal["string", "number", "array", "object", "boolean"]
default: Union[str, int, float, List, Dict, bool] = Field(default=None)
description: str = Field(default="")


class ModelSettings(BaseModel):
"""Model settings for a prompty model."""

api: str = Field(default="")
configuration: dict = Field(default={})
parameters: dict = Field(default={})
@@ -40,11 +46,15 @@ def model_dump_safe(self) -> dict:


class TemplateSettings(BaseModel):
"""Template settings for a prompty model."""

type: str = Field(default="mustache")
parser: str = Field(default="")


class Prompty(BaseModel):
"""Base Prompty model."""

# metadata
name: str = Field(default="")
description: str = Field(default="")
@@ -147,6 +157,16 @@ def normalize(attribute: Any, parent: Path, env_error: bool = True) -> Any:
def param_hoisting(
top: Dict[str, Any], bottom: Dict[str, Any], top_key: Any = None
) -> Dict[str, Any]:
"""Merge two dictionaries with hoisting of parameters from bottom to top.
Args:
top: The top dictionary.
bottom: The bottom dictionary.
top_key: The key to hoist from the bottom to the top.
Returns:
The merged dictionary.
"""
if top_key:
new_dict = {**top[top_key]} if top_key in top else {}
else:
@@ -158,6 +178,8 @@ def param_hoisting(


class Invoker(abc.ABC):
"""Base class for all invokers."""

def __init__(self, prompty: Prompty) -> None:
self.prompty = prompty

@@ -170,11 +192,15 @@ def __call__(self, data: BaseModel) -> BaseModel:


class NoOpParser(Invoker):
"""NoOp parser for invokers."""

def invoke(self, data: BaseModel) -> BaseModel:
return data


class InvokerFactory(object):
"""Factory for creating invokers."""

_instance = None
_renderers: Dict[str, Type[Invoker]] = {}
_parsers: Dict[str, Type[Invoker]] = {}
@@ -259,6 +285,8 @@ def to_json(self) -> str:


class Frontmatter:
"""Class for reading frontmatter from a string or file."""

_yaml_delim = r"(?:---|\+\+\+)"
_yaml = r"(.*?)"
_content = r"\s*(.+)$"
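
For illustration, a minimal standalone sketch of the merge behaviour that the param_hoisting docstring above describes: keys in top win, bottom only fills gaps, and when top_key is given only that sub-dict of top is used. The helper name merge_with_hoisting and the else-branch copying top are assumptions for this sketch, not the library code itself.

from typing import Any, Dict, Optional

def merge_with_hoisting(
    top: Dict[str, Any], bottom: Dict[str, Any], top_key: Optional[str] = None
) -> Dict[str, Any]:
    # Start from the hoisted sub-dict when top_key is given, otherwise from top itself.
    new_dict = {**top.get(top_key, {})} if top_key else {**top}
    for key, value in bottom.items():
        new_dict.setdefault(key, value)  # bottom never overwrites an existing top key
    return new_dict

print(merge_with_hoisting({"temperature": 0}, {"temperature": 1, "max_tokens": 256}))
# -> {'temperature': 0, 'max_tokens': 256}
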
2 changes: 2 additions & 0 deletions libs/partners/prompty/langchain_prompty/langchain.py
@@ -9,6 +9,8 @@
def create_chat_prompt(
path: str, input_name_agent_scratchpad: str = "agent_scratchpad"
) -> Runnable[Dict[str, Any], ChatPromptTemplate]:
"""Create a chat prompt from a Langchain schema."""

def runnable_chat_lambda(inputs: Dict[str, Any]) -> ChatPromptTemplate:
p = load(path)
parsed = prepare(p, inputs)
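
A hedged usage sketch for create_chat_prompt: the ./chat.prompty path and the question input variable are hypothetical and depend on your own prompty file, and the top-level import assumes the function is re-exported at the package root as in the package docs.

from typing import Any, Dict
from langchain_prompty import create_chat_prompt

prompt_runnable = create_chat_prompt("./chat.prompty")  # hypothetical file

# Invoking the runnable renders and parses the prompty file with the given inputs
# and returns a ChatPromptTemplate built from the resulting messages.
inputs: Dict[str, Any] = {"question": "What is a .prompty file?"}
chat_prompt = prompt_runnable.invoke(inputs)
print(chat_prompt.messages)
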
2 changes: 2 additions & 0 deletions libs/partners/prompty/langchain_prompty/parsers.py
@@ -8,6 +8,8 @@


class PromptyChatParser(Invoker):
"""Parse a chat prompt into a list of messages."""

def __init__(self, prompty: Prompty) -> None:
self.prompty = prompty
self.roles = ["assistant", "function", "system", "user", "human", "ai"]
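
To make the one-line docstring concrete, here is an illustrative, simplified splitter for the role-tagged markup the roles list above refers to. split_by_role is a hypothetical helper for this sketch, not PromptyChatParser's actual invoke logic.

import re
from typing import List, Tuple

ROLES = ["assistant", "function", "system", "user", "human", "ai"]

def split_by_role(text: str) -> List[Tuple[str, str]]:
    # Split on lines that consist only of a role name followed by a colon.
    pattern = r"^\s*(" + "|".join(ROLES) + r")\s*:\s*$"
    chunks = re.split(pattern, text, flags=re.MULTILINE | re.IGNORECASE)
    # re.split keeps the captured role names, so pair each role with the text after it.
    return [(role.lower(), body.strip()) for role, body in zip(chunks[1::2], chunks[2::2])]

print(split_by_role("system:\nYou are terse.\nuser:\nHello!"))
# -> [('system', 'You are terse.'), ('user', 'Hello!')]
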
2 changes: 2 additions & 0 deletions libs/partners/prompty/langchain_prompty/renderers.py
@@ -5,6 +5,8 @@


class MustacheRenderer(Invoker):
"""Render a mustache template."""

def __init__(self, prompty: Prompty) -> None:
self.prompty = prompty

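
What "render a mustache template" amounts to, shown with the third-party chevron package purely for illustration; MustacheRenderer may rely on a different mustache implementation internally.

import chevron  # pip install chevron; used here only to illustrate mustache rendering

template = "Hello {{name}}, you asked: {{question}}"
print(chevron.render(template, {"name": "Ada", "question": "What is prompty?"}))
# -> Hello Ada, you asked: What is prompty?
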
45 changes: 45 additions & 0 deletions libs/partners/prompty/langchain_prompty/utils.py
@@ -15,6 +15,15 @@


def load(prompt_path: str, configuration: str = "default") -> Prompty:
"""Load a prompty file and return a Prompty object.
Args:
prompt_path: The path to the prompty file.
configuration: The configuration to use. Defaults to "default".
Returns:
The Prompty object.
"""
file_path = Path(prompt_path)
if not file_path.is_absolute():
# get caller's path (take into account trace frame)
@@ -113,6 +122,15 @@ def prepare(
prompt: Prompty,
inputs: Dict[str, Any] = {},
) -> Any:
"""Prepare the inputs for the prompty.
Args:
prompt: The Prompty object.
inputs: The inputs to the prompty. Defaults to {}.
Returns:
The prepared inputs.
"""
invoker = InvokerFactory()

inputs = param_hoisting(inputs, prompt.sample)
@@ -153,6 +171,18 @@ def run(
parameters: Dict[str, Any] = {},
raw: bool = False,
) -> Any:
"""Run the prompty.
Args:
prompt: The Prompty object.
content: The content to run the prompty on.
configuration: The configuration to use. Defaults to {}.
parameters: The parameters to use. Defaults to {}.
raw: Whether to return the raw output. Defaults to False.
Returns:
The result of running the prompty.
"""
invoker = InvokerFactory()

if configuration != {}:
@@ -195,6 +225,21 @@ def execute(
raw: bool = False,
connection: str = "default",
) -> Any:
"""Execute a prompty.
Args:
prompt: The prompt to execute.
Can be a path to a prompty file or a Prompty object.
configuration: The configuration to use. Defaults to {}.
parameters: The parameters to use. Defaults to {}.
inputs: The inputs to the prompty. Defaults to {}.
raw: Whether to return the raw output. Defaults to False.
connection: The connection to use. Defaults to "default".
Returns:
The result of executing the prompty.
"""

if isinstance(prompt, str):
prompt = load(prompt, connection)

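
Putting the documented helpers together, a hedged end-to-end sketch: the ./basic_chat.prompty file and the question input are hypothetical, and the exact shape of the prepared output depends on the prompty's template and parser settings.

from langchain_prompty.utils import load, prepare, execute

prompty_obj = load("./basic_chat.prompty")  # hypothetical path

# prepare() renders the template with the inputs and parses it, without calling a model.
prepared = prepare(prompty_obj, {"question": "What does `execute` add on top?"})
print(prepared)

# execute() accepts either a path or a Prompty object (see its docstring above).
# result = execute("./basic_chat.prompty", inputs={"question": "..."})
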
