diff --git a/docs/docs/concepts.mdx b/docs/docs/concepts.mdx
index cdcbc647cb586..30671511805bc 100644
--- a/docs/docs/concepts.mdx
+++ b/docs/docs/concepts.mdx
@@ -179,8 +179,15 @@ For a full list of LangChain model providers with multimodal models, [check out
### LLMs
+:::caution
+Pure text-in/text-out LLMs tend to be older or lower-level. Many popular models are best used as [chat completion models](/docs/concepts/#chat-models),
+even for non-chat use cases.
+
+You are probably looking for [the section above instead](/docs/concepts/#chat-models).
+:::
+
Language models that takes a string as input and returns a string.
-These are traditionally older models (newer models generally are [Chat Models](/docs/concepts/#chat-models), see below).
+These are traditionally older models (newer models generally are [Chat Models](/docs/concepts/#chat-models), see above).
Although the underlying models are string in, string out, the LangChain wrappers also allow these models to take messages as input.
This gives them the same interface as [Chat Models](/docs/concepts/#chat-models).
diff --git a/docs/docs/how_to/chatbots_tools.ipynb b/docs/docs/how_to/chatbots_tools.ipynb
index 5ea0c54bbcc81..07fff046b306c 100644
--- a/docs/docs/how_to/chatbots_tools.ipynb
+++ b/docs/docs/how_to/chatbots_tools.ipynb
@@ -1,58 +1,37 @@
{
"cells": [
- {
- "cell_type": "raw",
- "metadata": {},
- "source": [
- "---\n",
- "sidebar_position: 3\n",
- "---"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# How to add tools to chatbots\n",
"\n",
- "This section will cover how to create conversational agents: chatbots that can interact with other systems and APIs using tools.\n",
+ ":::info Prerequisites\n",
+ "\n",
+ "This guide assumes familiarity with the following concepts:\n",
+ "\n",
+ "- [Chatbots](/docs/concepts/#messages)\n",
+ "- [Agents](/docs/tutorials/agents)\n",
+ "- [Chat history](/docs/concepts/#chat-history)\n",
"\n",
- "Before reading this guide, we recommend you read both [the chatbot quickstart](/docs/tutorials/chatbot) in this section and be familiar with [the documentation on agents](/docs/tutorials/agents).\n",
+ ":::\n",
+ "\n",
+ "This section will cover how to create conversational agents: chatbots that can interact with other systems and APIs using tools.\n",
"\n",
"## Setup\n",
"\n",
- "For this guide, we'll be using an [OpenAI tools agent](/docs/how_to/agent_executor) with a single tool for searching the web. The default will be powered by [Tavily](/docs/integrations/tools/tavily_search), but you can switch it out for any similar tool. The rest of this section will assume you're using Tavily.\n",
+ "For this guide, we'll be using a [tool calling agent](/docs/how_to/agent_executor) with a single tool for searching the web. The default will be powered by [Tavily](/docs/integrations/tools/tavily_search), but you can switch it out for any similar tool. The rest of this section will assume you're using Tavily.\n",
"\n",
"You'll need to [sign up for an account](https://tavily.com/) on the Tavily website, and install the following packages:"
]
},
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "\u001b[33mWARNING: You are using pip version 22.0.4; however, version 23.3.2 is available.\n",
- "You should consider upgrading via the '/Users/jacoblee/.pyenv/versions/3.10.5/bin/python -m pip install --upgrade pip' command.\u001b[0m\u001b[33m\n",
- "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n"
- ]
- },
- {
- "data": {
- "text/plain": [
- "True"
- ]
- },
- "execution_count": 1,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
- "%pip install --upgrade --quiet langchain-openai tavily-python\n",
+ "%pip install --upgrade --quiet langchain-community langchain-openai tavily-python\n",
"\n",
"# Set env var OPENAI_API_KEY or load from a .env file:\n",
"import dotenv\n",
@@ -107,17 +86,17 @@
"metadata": {},
"outputs": [],
"source": [
- "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
+ "from langchain_core.prompts import ChatPromptTemplate\n",
"\n",
- "# Adapted from https://smith.langchain.com/hub/hwchase17/openai-tools-agent\n",
+ "# Adapted from https://smith.langchain.com/hub/jacob/tool-calling-agent\n",
"prompt = ChatPromptTemplate.from_messages(\n",
" [\n",
" (\n",
" \"system\",\n",
" \"You are a helpful assistant. You may not need to use tools for every query - the user may just want to chat!\",\n",
" ),\n",
- " MessagesPlaceholder(variable_name=\"messages\"),\n",
- " MessagesPlaceholder(variable_name=\"agent_scratchpad\"),\n",
+ " (\"placeholder\", \"{messages}\"),\n",
+ " (\"placeholder\", \"{agent_scratchpad}\"),\n",
" ]\n",
")"
]
@@ -135,9 +114,9 @@
"metadata": {},
"outputs": [],
"source": [
- "from langchain.agents import AgentExecutor, create_openai_tools_agent\n",
+ "from langchain.agents import AgentExecutor, create_tool_calling_agent\n",
"\n",
- "agent = create_openai_tools_agent(chat, tools, prompt)\n",
+ "agent = create_tool_calling_agent(chat, tools, prompt)\n",
"\n",
"agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)"
]
@@ -209,7 +188,9 @@
"Invoking: `tavily_search_results_json` with `{'query': 'current conservation status of the Great Barrier Reef'}`\n",
"\n",
"\n",
- "\u001b[0m\u001b[36;1m\u001b[1;3m[{'url': 'https://www.barrierreef.org/news/blog/this-is-the-critical-decade-for-coral-reef-survival', 'content': \"global coral reef conservation. © 2024 Great Barrier Reef Foundation. Website by bigfish.tv #Related News · 29 January 2024 290m more baby corals to help restore and protect the Great Barrier Reef Great Barrier Reef Foundation Managing Director Anna Marsden says it’s not too late if we act now.The Status of Coral Reefs of the World: 2020 report is the largest analysis of global coral reef health ever undertaken. It found that 14 per cent of the world's coral has been lost since 2009. The report also noted, however, that some of these corals recovered during the 10 years to 2019.\"}]\u001b[0m\u001b[32;1m\u001b[1;3mThe current conservation status of the Great Barrier Reef is a critical concern. According to the Great Barrier Reef Foundation, the Status of Coral Reefs of the World: 2020 report found that 14% of the world's coral has been lost since 2009. However, the report also noted that some of these corals recovered during the 10 years to 2019. For more information, you can visit the following link: [Great Barrier Reef Foundation - Conservation Status](https://www.barrierreef.org/news/blog/this-is-the-critical-decade-for-coral-reef-survival)\u001b[0m\n",
+ "\u001b[0m\u001b[36;1m\u001b[1;3m[{'url': 'https://www.abc.net.au/news/2022-08-04/great-barrier-reef-report-says-coral-recovering-after-bleaching/101296186', 'content': 'Great Barrier Reef hit with widespread and severe bleaching event\\n\\'Devastating\\': Over 90pc of reefs on Great Barrier Reef suffered bleaching over summer, report reveals\\nTop Stories\\nJailed Russian opposition leader Alexei Navalny is dead, says prison service\\nTaylor Swift puts an Aussie twist on a classic as she packs the MCG for the biggest show of her career — as it happened\\nMelbourne comes alive with Swifties, as even those without tickets turn up to soak in the atmosphere\\nAustralian Border Force investigates after arrival of more than 20 men by boat north of Broome\\nOpenAI launches video model that can instantly create short clips from text prompts\\nAntoinette Lattouf loses bid to force ABC to produce emails calling for her dismissal\\nCategory one cyclone makes landfall in Gulf of Carpentaria off NT-Queensland border\\nWhy the RBA may be forced to cut before the Fed\\nBrisbane records \\'wettest day since 2022\\', as woman dies in floodwaters near Mount Isa\\n$45m Sydney beachside home once owned by late radio star is demolished less than a year after sale\\nAnnabel Sutherland\\'s historic double century puts Australia within reach of Test victory over South Africa\\nAlmighty defensive effort delivers Indigenous victory in NRL All Stars clash\\nLisa Wilkinson feared she would have to sell home to pay legal costs of Bruce Lehrmann\\'s defamation case, court documents reveal\\nSupermarkets as you know them are disappearing from our cities\\nNRL issues Broncos\\' Reynolds, Carrigan with breach notices after public scrap\\nPopular Now\\nJailed Russian opposition leader Alexei Navalny is dead, says prison service\\nTaylor Swift puts an Aussie twist on a classic as she packs the MCG for the biggest show of her career — as it happened\\n$45m Sydney beachside home once owned by late 
radio star is demolished less than a year after sale\\nAustralian Border Force investigates after arrival of more than 20 men by boat north of Broome\\nDealer sentenced for injecting children as young as 12 with methylamphetamine\\nMelbourne comes alive with Swifties, as even those without tickets turn up to soak in the atmosphere\\nTop Stories\\nJailed Russian opposition leader Alexei Navalny is dead, says prison service\\nTaylor Swift puts an Aussie twist on a classic as she packs the MCG for the biggest show of her career — as it happened\\nMelbourne comes alive with Swifties, as even those without tickets turn up to soak in the atmosphere\\nAustralian Border Force investigates after arrival of more than 20 men by boat north of Broome\\nOpenAI launches video model that can instantly create short clips from text prompts\\nJust In\\nJailed Russian opposition leader Alexei Navalny is dead, says prison service\\nMelbourne comes alive with Swifties, as even those without tickets turn up to soak in the atmosphere\\nTraveller alert after one-year-old in Adelaide reported with measles\\nAntoinette Lattouf loses bid to force ABC to produce emails calling for her dismissal\\nFooter\\nWe acknowledge Aboriginal and Torres Strait Islander peoples as the First Australians and Traditional Custodians of the lands where we live, learn, and work.\\n Increased coral cover could come at a cost\\nThe rapid growth in coral cover appears to have come at the expense of the diversity of coral on the reef, with most of the increases accounted for by fast-growing branching coral called Acropora.\\n Documents obtained by the ABC under Freedom of Information laws revealed the Morrison government had forced AIMS to rush the report\\'s release and orchestrated a \"leak\" of the material to select media outlets ahead of the reef being considered for inclusion on the World Heritage In Danger list.\\n The reef\\'s status and potential inclusion on the In Danger list were due to be discussed at 
the 45th session of the World Heritage Committee in Russia in June this year, but the meeting was indefinitely postponed due to the war in Ukraine.\\n More from ABC\\nEditorial Policies\\nGreat Barrier Reef coral cover at record levels after mass-bleaching events, report shows\\nGreat Barrier Reef coral cover at record levels after mass-bleaching events, report shows\\nRecord coral cover is being seen across much of the Great Barrier Reef as it recovers from past storms and mass-bleaching events.'}]\u001b[0m\u001b[32;1m\u001b[1;3mThe Great Barrier Reef is currently showing signs of recovery, with record coral cover being seen across much of the reef. This recovery comes after past storms and mass-bleaching events. However, the rapid growth in coral cover appears to have come at the expense of the diversity of coral on the reef, with most of the increases accounted for by fast-growing branching coral called Acropora. There were discussions about the reef's potential inclusion on the World Heritage In Danger list, but the meeting to consider this was indefinitely postponed due to the war in Ukraine.\n",
+ "\n",
+ "You can read more about it in this article: [Great Barrier Reef hit with widespread and severe bleaching event](https://www.abc.net.au/news/2022-08-04/great-barrier-reef-report-says-coral-recovering-after-bleaching/101296186)\u001b[0m\n",
"\n",
"\u001b[1m> Finished chain.\u001b[0m\n"
]
@@ -218,7 +199,7 @@
"data": {
"text/plain": [
"{'messages': [HumanMessage(content='What is the current conservation status of the Great Barrier Reef?')],\n",
- " 'output': \"The current conservation status of the Great Barrier Reef is a critical concern. According to the Great Barrier Reef Foundation, the Status of Coral Reefs of the World: 2020 report found that 14% of the world's coral has been lost since 2009. However, the report also noted that some of these corals recovered during the 10 years to 2019. For more information, you can visit the following link: [Great Barrier Reef Foundation - Conservation Status](https://www.barrierreef.org/news/blog/this-is-the-critical-decade-for-coral-reef-survival)\"}"
+ " 'output': \"The Great Barrier Reef is currently showing signs of recovery, with record coral cover being seen across much of the reef. This recovery comes after past storms and mass-bleaching events. However, the rapid growth in coral cover appears to have come at the expense of the diversity of coral on the reef, with most of the increases accounted for by fast-growing branching coral called Acropora. There were discussions about the reef's potential inclusion on the World Heritage In Danger list, but the meeting to consider this was indefinitely postponed due to the war in Ukraine.\\n\\nYou can read more about it in this article: [Great Barrier Reef hit with widespread and severe bleaching event](https://www.abc.net.au/news/2022-08-04/great-barrier-reef-report-says-coral-recovering-after-bleaching/101296186)\"}"
]
},
"execution_count": 6,
@@ -296,7 +277,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "If preferred, you can also wrap the agent executor in a `RunnableWithMessageHistory` class to internally manage history messages. First, we need to slightly modify the prompt to take a separate input variable so that the wrapper can parse which input value to store as history:"
+ "If preferred, you can also wrap the agent executor in a [`RunnableWithMessageHistory`](/docs/how_to/message_history/) class to internally manage history messages. Let's redeclare it this way:"
]
},
{
@@ -305,20 +286,7 @@
"metadata": {},
"outputs": [],
"source": [
- "# Adapted from https://smith.langchain.com/hub/hwchase17/openai-tools-agent\n",
- "prompt = ChatPromptTemplate.from_messages(\n",
- " [\n",
- " (\n",
- " \"system\",\n",
- " \"You are a helpful assistant. You may not need to use tools for every query - the user may just want to chat!\",\n",
- " ),\n",
- " MessagesPlaceholder(variable_name=\"chat_history\"),\n",
- " (\"human\", \"{input}\"),\n",
- " MessagesPlaceholder(variable_name=\"agent_scratchpad\"),\n",
- " ]\n",
- ")\n",
- "\n",
- "agent = create_openai_tools_agent(chat, tools, prompt)\n",
+ "agent = create_tool_calling_agent(chat, tools, prompt)\n",
"\n",
"agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)"
]
@@ -332,27 +300,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [],
- "source": [
- "from langchain_community.chat_message_histories import ChatMessageHistory\n",
- "from langchain_core.runnables.history import RunnableWithMessageHistory\n",
- "\n",
- "demo_ephemeral_chat_history_for_chain = ChatMessageHistory()\n",
- "\n",
- "conversational_agent_executor = RunnableWithMessageHistory(\n",
- " agent_executor,\n",
- " lambda session_id: demo_ephemeral_chat_history_for_chain,\n",
- " input_messages_key=\"input\",\n",
- " output_messages_key=\"output\",\n",
- " history_messages_key=\"chat_history\",\n",
- ")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 10,
+ "execution_count": 11,
"metadata": {},
"outputs": [
{
@@ -370,28 +318,44 @@
{
"data": {
"text/plain": [
- "{'input': \"I'm Nemo!\",\n",
- " 'chat_history': [],\n",
+ "{'messages': [HumanMessage(content=\"I'm Nemo!\")],\n",
" 'output': \"Hi Nemo! It's great to meet you. How can I assist you today?\"}"
]
},
- "execution_count": 10,
+ "execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
+ "from langchain_community.chat_message_histories import ChatMessageHistory\n",
+ "from langchain_core.runnables.history import RunnableWithMessageHistory\n",
+ "\n",
+ "demo_ephemeral_chat_history_for_chain = ChatMessageHistory()\n",
+ "\n",
+ "conversational_agent_executor = RunnableWithMessageHistory(\n",
+ " agent_executor,\n",
+ " lambda session_id: demo_ephemeral_chat_history_for_chain,\n",
+ " input_messages_key=\"messages\",\n",
+ " output_messages_key=\"output\",\n",
+ ")\n",
+ "\n",
"conversational_agent_executor.invoke(\n",
- " {\n",
- " \"input\": \"I'm Nemo!\",\n",
- " },\n",
+ " {\"messages\": [HumanMessage(\"I'm Nemo!\")]},\n",
" {\"configurable\": {\"session_id\": \"unused\"}},\n",
")"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "And then if we rerun our wrapped agent executor:"
+ ]
+ },
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": null,
"metadata": {},
"outputs": [
{
@@ -409,9 +373,9 @@
{
"data": {
"text/plain": [
- "{'input': 'What is my name?',\n",
- " 'chat_history': [HumanMessage(content=\"I'm Nemo!\"),\n",
- " AIMessage(content=\"Hi Nemo! It's great to meet you. How can I assist you today?\")],\n",
+ "{'messages': [HumanMessage(content=\"I'm Nemo!\"),\n",
+ " AIMessage(content=\"Hi Nemo! It's great to meet you. How can I assist you today?\"),\n",
+ " HumanMessage(content='What is my name?')],\n",
" 'output': 'Your name is Nemo! How can I assist you today, Nemo?'}"
]
},
@@ -422,9 +386,7 @@
],
"source": [
"conversational_agent_executor.invoke(\n",
- " {\n",
- " \"input\": \"What is my name?\",\n",
- " },\n",
+ " {\"messages\": [HumanMessage(\"What is my name?\")]},\n",
" {\"configurable\": {\"session_id\": \"unused\"}},\n",
")"
]
@@ -433,6 +395,8 @@
"cell_type": "markdown",
"metadata": {},
"source": [
+ "This [LangSmith trace](https://smith.langchain.com/public/1a9f712a-7918-4661-b3ff-d979bcc2af42/r) shows what's going on under the hood.\n",
+ "\n",
"## Further reading\n",
"\n",
"Other types agents can also support conversational responses too - for more, check out the [agents section](/docs/tutorials/agents).\n",
@@ -457,7 +421,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.10.1"
+ "version": "3.10.5"
}
},
"nbformat": 4,
diff --git a/docs/docs/how_to/message_history.ipynb b/docs/docs/how_to/message_history.ipynb
index 79f21f9dd95f7..7befae0f57e27 100644
--- a/docs/docs/how_to/message_history.ipynb
+++ b/docs/docs/how_to/message_history.ipynb
@@ -129,7 +129,7 @@
"id": "a531da5e",
"metadata": {},
"source": [
- "## What is the runnable you are trying wrap?\n",
+ "## What is the runnable you are trying to wrap?\n",
"\n",
"`RunnableWithMessageHistory` can only wrap certain types of Runnables. Specifically, it can be used for any Runnable that takes as input one of:\n",
"\n",
diff --git a/docs/docs/how_to/sql_query_checking.ipynb b/docs/docs/how_to/sql_query_checking.ipynb
index 99ca9f9ddeafd..f4205c3f141ff 100644
--- a/docs/docs/how_to/sql_query_checking.ipynb
+++ b/docs/docs/how_to/sql_query_checking.ipynb
@@ -243,7 +243,7 @@
"text": [
"================================\u001b[1m System Message \u001b[0m================================\n",
"\n",
- "You are a \u001b[33;1m\u001b[1;3m{dialect}\u001b[0m expert. Given an input question, creat a syntactically correct \u001b[33;1m\u001b[1;3m{dialect}\u001b[0m query to run.\n",
+ "You are a \u001b[33;1m\u001b[1;3m{dialect}\u001b[0m expert. Given an input question, create a syntactically correct \u001b[33;1m\u001b[1;3m{dialect}\u001b[0m query to run.\n",
"Unless the user specifies in the question a specific number of examples to obtain, query for at most \u001b[33;1m\u001b[1;3m{top_k}\u001b[0m results using the LIMIT clause as per \u001b[33;1m\u001b[1;3m{dialect}\u001b[0m. You can order the results to return the most informative data in the database.\n",
"Never query for all columns from a table. You must query only the columns that are needed to answer the question. Wrap each column name in double quotes (\") to denote them as delimited identifiers.\n",
"Pay attention to use only the column names you can see in the tables below. Be careful to not query for columns that do not exist. Also, pay attention to which column is in which table.\n",
@@ -275,7 +275,7 @@
}
],
"source": [
- "system = \"\"\"You are a {dialect} expert. Given an input question, creat a syntactically correct {dialect} query to run.\n",
+ "system = \"\"\"You are a {dialect} expert. Given an input question, create a syntactically correct {dialect} query to run.\n",
"Unless the user specifies in the question a specific number of examples to obtain, query for at most {top_k} results using the LIMIT clause as per {dialect}. You can order the results to return the most informative data in the database.\n",
"Never query for all columns from a table. You must query only the columns that are needed to answer the question. Wrap each column name in double quotes (\") to denote them as delimited identifiers.\n",
"Pay attention to use only the column names you can see in the tables below. Be careful to not query for columns that do not exist. Also, pay attention to which column is in which table.\n",
diff --git a/docs/docs/integrations/chat/oci_generative_ai.ipynb b/docs/docs/integrations/chat/oci_generative_ai.ipynb
new file mode 100644
index 0000000000000..4ce58a13fbf00
--- /dev/null
+++ b/docs/docs/integrations/chat/oci_generative_ai.ipynb
@@ -0,0 +1,190 @@
+{
+ "cells": [
+ {
+ "cell_type": "raw",
+ "id": "afaf8039",
+ "metadata": {},
+ "source": [
+ "---\n",
+ "sidebar_label: OCIGenAI\n",
+ "---"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e49f1e0d",
+ "metadata": {},
+ "source": [
+ "# ChatOCIGenAI\n",
+ "\n",
+ "This notebook provides a quick overview for getting started with OCIGenAI [chat models](/docs/concepts/#chat-models). For detailed documentation of all ChatOCIGenAI features and configurations head to the [API reference](https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.oci_generative_ai.ChatOCIGenAI.html).\n",
+ "\n",
+ "Oracle Cloud Infrastructure (OCI) Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases, and which is available through a single API.\n",
+ "Using the OCI Generative AI service you can access ready-to-use pretrained models, or create and host your own fine-tuned custom models based on your own data on dedicated AI clusters. Detailed documentation of the service and API is available __[here](https://docs.oracle.com/en-us/iaas/Content/generative-ai/home.htm)__ and __[here](https://docs.oracle.com/en-us/iaas/api/#/en/generative-ai/20231130/)__.\n",
+ "\n",
+ "\n",
+ "## Overview\n",
+ "### Integration details\n",
+ "\n",
+ "| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/v0.2/docs/integrations/chat/oci_generative_ai) | Package downloads | Package latest |\n",
+ "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n",
+ "| [ChatOCIGenAI](https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.oci_generative_ai.ChatOCIGenAI.html) | [langchain-community](https://api.python.langchain.com/en/latest/community_api_reference.html) | ❌ | ❌ | ❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/langchain-oci-generative-ai?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-oci-generative-ai?style=flat-square&label=%20) |\n",
+ "\n",
+ "### Model features\n",
+ "| [Tool calling](/docs/how_to/tool_calling/) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n",
+ "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n",
+ "| ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ✅ | ❌ | \n",
+ "\n",
+ "## Setup\n",
+ "\n",
+ "To access OCIGenAI models you'll need to install the `oci` and `langchain-community` packages.\n",
+ "\n",
+ "### Credentials\n",
+ "\n",
+ "The credentials and authentication methods supported for this integration are equivalent to those used with other OCI services and follow the __[standard SDK authentication](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm)__ methods, specifically API Key, session token, instance principal, and resource principal.\n",
+ "\n",
+    "API key is the default authentication method used in the examples below. The following example demonstrates how to use a different authentication method (session token)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "0730d6a1-c893-4840-9817-5e5251676d5d",
+ "metadata": {},
+ "source": [
+ "### Installation\n",
+ "\n",
+ "The LangChain OCIGenAI integration lives in the `langchain-community` package and you will also need to install the `oci` package:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "652d6238-1f87-422a-b135-f5abbb8652fc",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%pip install -qU langchain-community oci"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a38cde65-254d-4219-a441-068766c0d4b5",
+ "metadata": {},
+ "source": [
+ "## Instantiation\n",
+ "\n",
+ "Now we can instantiate our model object and generate chat completions:\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from langchain_community.chat_models.oci_generative_ai import ChatOCIGenAI\n",
+ "from langchain_core.messages import AIMessage, HumanMessage, SystemMessage\n",
+ "\n",
+ "chat = ChatOCIGenAI(\n",
+ " model_id=\"cohere.command-r-16k\",\n",
+ " service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
+ " compartment_id=\"MY_OCID\",\n",
+ " model_kwargs={\"temperature\": 0.7, \"max_tokens\": 500},\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2b4f3e15",
+ "metadata": {},
+ "source": [
+ "## Invocation"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "62e0dbc3",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "messages = [\n",
+    "    SystemMessage(content=\"you are an AI assistant.\"),\n",
+ " AIMessage(content=\"Hi there human!\"),\n",
+ " HumanMessage(content=\"tell me a joke.\"),\n",
+ "]\n",
+ "response = chat.invoke(messages)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d86145b3-bfef-46e8-b227-4dda5c9c2705",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "print(response.content)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "18e2bfc0-7e78-4528-a73f-499ac150dca8",
+ "metadata": {},
+ "source": [
+ "## Chaining\n",
+ "\n",
+ "We can [chain](/docs/how_to/sequence/) our model with a prompt template like so:\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from langchain_core.prompts import ChatPromptTemplate\n",
+ "\n",
+ "prompt = ChatPromptTemplate.from_template(\"Tell me a joke about {topic}\")\n",
+ "chain = prompt | chat\n",
+ "\n",
+ "response = chain.invoke({\"topic\": \"dogs\"})\n",
+ "print(response.content)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3",
+ "metadata": {},
+ "source": [
+ "## API reference\n",
+ "\n",
+ "For detailed documentation of all ChatOCIGenAI features and configurations head to the API reference: https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.oci_generative_ai.ChatOCIGenAI.html"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.1"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/docs/integrations/llms/bedrock.ipynb b/docs/docs/integrations/llms/bedrock.ipynb
index d751311452b6a..bb1b62ad61b24 100644
--- a/docs/docs/integrations/llms/bedrock.ipynb
+++ b/docs/docs/integrations/llms/bedrock.ipynb
@@ -52,67 +52,6 @@
")"
]
},
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Using in a conversation chain"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from langchain.chains import ConversationChain\n",
- "from langchain.memory import ConversationBufferMemory\n",
- "\n",
- "conversation = ConversationChain(\n",
- " llm=llm, verbose=True, memory=ConversationBufferMemory()\n",
- ")\n",
- "\n",
- "conversation.predict(input=\"Hi there!\")"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Conversation Chain With Streaming"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from langchain_community.llms import Bedrock\n",
- "from langchain_core.callbacks import StreamingStdOutCallbackHandler\n",
- "\n",
- "llm = Bedrock(\n",
- " credentials_profile_name=\"bedrock-admin\",\n",
- " model_id=\"amazon.titan-text-express-v1\",\n",
- " streaming=True,\n",
- " callbacks=[StreamingStdOutCallbackHandler()],\n",
- ")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "conversation = ConversationChain(\n",
- " llm=llm, verbose=True, memory=ConversationBufferMemory()\n",
- ")\n",
- "\n",
- "conversation.predict(input=\"Hi there!\")"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -132,22 +71,17 @@
" model_id=\"\", # ARN like 'arn:aws:bedrock:...' obtained via provisioning the custom model\n",
" model_kwargs={\"temperature\": 1},\n",
" streaming=True,\n",
- " callbacks=[StreamingStdOutCallbackHandler()],\n",
")\n",
"\n",
- "conversation = ConversationChain(\n",
- " llm=custom_llm, verbose=True, memory=ConversationBufferMemory()\n",
- ")\n",
- "conversation.predict(input=\"What is the recipe of mayonnaise?\")"
+ "custom_llm.invoke(input=\"What is the recipe of mayonnaise?\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "### Guardrails for Amazon Bedrock example \n",
+ "## Guardrails for Amazon Bedrock\n",
"\n",
- "## Guardrails for Amazon Bedrock (Preview) \n",
"[Guardrails for Amazon Bedrock](https://aws.amazon.com/bedrock/guardrails/) evaluates user inputs and model responses based on use case specific policies, and provides an additional layer of safeguards regardless of the underlying model. Guardrails can be applied across models, including Anthropic Claude, Meta Llama 2, Cohere Command, AI21 Labs Jurassic, and Amazon Titan Text, as well as fine-tuned models.\n",
"**Note**: Guardrails for Amazon Bedrock is currently in preview and not generally available. Reach out through your usual AWS Support contacts if you’d like access to this feature.\n",
"In this section, we are going to set up a Bedrock language model with specific guardrails that include tracing capabilities. "
diff --git a/docs/docs/integrations/llms/oci_generative_ai.ipynb b/docs/docs/integrations/llms/oci_generative_ai.ipynb
index 0c2368efdc293..3da80aef0e4ae 100644
--- a/docs/docs/integrations/llms/oci_generative_ai.ipynb
+++ b/docs/docs/integrations/llms/oci_generative_ai.ipynb
@@ -14,15 +14,15 @@
"Oracle Cloud Infrastructure (OCI) Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases, and which is available through a single API.\n",
"Using the OCI Generative AI service you can access ready-to-use pretrained models, or create and host your own fine-tuned custom models based on your own data on dedicated AI clusters. Detailed documentation of the service and API is available __[here](https://docs.oracle.com/en-us/iaas/Content/generative-ai/home.htm)__ and __[here](https://docs.oracle.com/en-us/iaas/api/#/en/generative-ai/20231130/)__.\n",
"\n",
- "This notebook explains how to use OCI's Genrative AI models with LangChain."
+    "This notebook explains how to use OCI's Generative AI completion models with LangChain."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "### Prerequisite\n",
- "We will need to install the oci sdk"
+ "## Setup\n",
+ "Ensure that the oci sdk and the langchain-community package are installed"
]
},
{
@@ -31,38 +31,40 @@
"metadata": {},
"outputs": [],
"source": [
- "!pip install -U oci"
+ "!pip install -U oci langchain-community"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "### OCI Generative AI API endpoint \n",
- "https://inference.generativeai.us-chicago-1.oci.oraclecloud.com"
+ "## Usage"
]
},
{
- "cell_type": "markdown",
+ "cell_type": "code",
+ "execution_count": null,
"metadata": {},
+ "outputs": [],
"source": [
- "## Authentication\n",
- "The authentication methods supported for this langchain integration are:\n",
+ "from langchain_community.llms.oci_generative_ai import OCIGenAI\n",
"\n",
- "1. API Key\n",
- "2. Session token\n",
- "3. Instance principal\n",
- "4. Resource principal \n",
+ "llm = OCIGenAI(\n",
+ " model_id=\"cohere.command\",\n",
+ " service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
+ " compartment_id=\"MY_OCID\",\n",
+ " model_kwargs={\"temperature\": 0, \"max_tokens\": 500},\n",
+ ")\n",
"\n",
- "These follows the standard SDK authentication methods detailed __[here](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm)__.\n",
- " "
+ "response = llm.invoke(\"Tell me one fact about earth\", temperature=0.7)\n",
+ "print(response)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## Usage"
+ "#### Chaining with prompt templates"
]
},
{
@@ -71,44 +73,54 @@
"metadata": {},
"outputs": [],
"source": [
- "from langchain_community.llms import OCIGenAI\n",
+ "from langchain_core.prompts import PromptTemplate\n",
"\n",
- "# use default authN method API-key\n",
"llm = OCIGenAI(\n",
- " model_id=\"MY_MODEL\",\n",
+ " model_id=\"cohere.command\",\n",
" service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
" compartment_id=\"MY_OCID\",\n",
+ " model_kwargs={\"temperature\": 0, \"max_tokens\": 500},\n",
")\n",
"\n",
- "response = llm.invoke(\"Tell me one fact about earth\", temperature=0.7)\n",
+ "prompt = PromptTemplate(input_variables=[\"query\"], template=\"{query}\")\n",
+ "llm_chain = prompt | llm\n",
+ "\n",
+ "response = llm_chain.invoke(\"what is the capital of france?\")\n",
"print(response)"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Streaming"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
- "from langchain.chains import LLMChain\n",
- "from langchain_core.prompts import PromptTemplate\n",
- "\n",
- "# Use Session Token to authN\n",
"llm = OCIGenAI(\n",
- " model_id=\"MY_MODEL\",\n",
+ " model_id=\"cohere.command\",\n",
" service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
" compartment_id=\"MY_OCID\",\n",
- " auth_type=\"SECURITY_TOKEN\",\n",
- " auth_profile=\"MY_PROFILE\", # replace with your profile name\n",
- " model_kwargs={\"temperature\": 0.7, \"top_p\": 0.75, \"max_tokens\": 200},\n",
+ " model_kwargs={\"temperature\": 0, \"max_tokens\": 500},\n",
")\n",
"\n",
- "prompt = PromptTemplate(input_variables=[\"query\"], template=\"{query}\")\n",
- "\n",
- "llm_chain = LLMChain(llm=llm, prompt=prompt)\n",
+ "for chunk in llm.stream(\"Write me a song about sparkling water.\"):\n",
+ " print(chunk, end=\"\", flush=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Authentication\n",
+    "The authentication methods supported for this LangChain integration are equivalent to those used with other OCI services and follow the __[standard SDK authentication](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm)__ methods, specifically API Key, session token, instance principal, and resource principal.\n",
"\n",
- "response = llm_chain.invoke(\"what is the capital of france?\")\n",
- "print(response)"
+ "API key is the default authentication method used in the examples above. The following example demonstrates how to use a different authentication method (session token)"
]
},
{
@@ -117,49 +129,39 @@
"metadata": {},
"outputs": [],
"source": [
- "from langchain_community.embeddings import OCIGenAIEmbeddings\n",
- "from langchain_community.vectorstores import FAISS\n",
- "from langchain_core.output_parsers import StrOutputParser\n",
- "from langchain_core.runnables import RunnablePassthrough\n",
- "\n",
- "embeddings = OCIGenAIEmbeddings(\n",
- " model_id=\"MY_EMBEDDING_MODEL\",\n",
+ "llm = OCIGenAI(\n",
+ " model_id=\"cohere.command\",\n",
" service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
" compartment_id=\"MY_OCID\",\n",
- ")\n",
- "\n",
- "vectorstore = FAISS.from_texts(\n",
- " [\n",
- " \"Larry Ellison co-founded Oracle Corporation in 1977 with Bob Miner and Ed Oates.\",\n",
- " \"Oracle Corporation is an American multinational computer technology company headquartered in Austin, Texas, United States.\",\n",
- " ],\n",
- " embedding=embeddings,\n",
- ")\n",
- "\n",
- "retriever = vectorstore.as_retriever()\n",
- "\n",
- "template = \"\"\"Answer the question based only on the following context:\n",
- "{context}\n",
- " \n",
- "Question: {question}\n",
- "\"\"\"\n",
- "prompt = PromptTemplate.from_template(template)\n",
+ " auth_type=\"SECURITY_TOKEN\",\n",
+ " auth_profile=\"MY_PROFILE\", # replace with your profile name\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Dedicated AI Cluster\n",
+ "To access models hosted in a dedicated AI cluster __[create an endpoint](https://docs.oracle.com/en-us/iaas/api/#/en/generative-ai-inference/20231130/)__ whose assigned OCID (currently prefixed by ‘ocid1.generativeaiendpoint.oc1.us-chicago-1’) is used as your model ID.\n",
"\n",
+ "When accessing models hosted in a dedicated AI cluster you will need to initialize the OCIGenAI interface with two extra required params (\"provider\" and \"context_size\")."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"llm = OCIGenAI(\n",
- " model_id=\"MY_MODEL\",\n",
+ " model_id=\"ocid1.generativeaiendpoint.oc1.us-chicago-1....\",\n",
" service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
- " compartment_id=\"MY_OCID\",\n",
- ")\n",
- "\n",
- "chain = (\n",
- " {\"context\": retriever, \"question\": RunnablePassthrough()}\n",
- " | prompt\n",
- " | llm\n",
- " | StrOutputParser()\n",
- ")\n",
- "\n",
- "print(chain.invoke(\"when was oracle founded?\"))\n",
- "print(chain.invoke(\"where is oracle headquartered?\"))"
+ " compartment_id=\"DEDICATED_COMPARTMENT_OCID\",\n",
+    "    auth_profile=\"MY_PROFILE\",  # replace with your profile name\n",
+ " provider=\"MODEL_PROVIDER\", # e.g., \"cohere\" or \"meta\"\n",
+ " context_size=\"MODEL_CONTEXT_SIZE\", # e.g., 128000\n",
+ ")"
]
}
],
diff --git a/docs/docs/integrations/providers/ascend.mdx b/docs/docs/integrations/providers/ascend.mdx
new file mode 100644
index 0000000000000..b8c1769a48965
--- /dev/null
+++ b/docs/docs/integrations/providers/ascend.mdx
@@ -0,0 +1,24 @@
+# Ascend
+
+>[Ascend](https://www.hiascend.com/) is a Neural Processing Unit (NPU) provided by Huawei
+
+This page covers how to use ascend NPU with LangChain.
+
+### Installation
+
+Install torch-npu using:
+
+```bash
+pip install torch-npu
+```
+
+Please follow the installation instructions as specified below:
+* Install CANN as shown [here](https://www.hiascend.com/document/detail/zh/canncommercial/700/quickstart/quickstart/quickstart_18_0002.html).
+
+### Embedding Models
+
+See a [usage example](/docs/integrations/text_embedding/ascend).
+
+```python
+from langchain_community.embeddings import AscendEmbeddings
+```
diff --git a/docs/docs/integrations/providers/oci.mdx b/docs/docs/integrations/providers/oci.mdx
index e0b3570028f4f..5037fb86f192f 100644
--- a/docs/docs/integrations/providers/oci.mdx
+++ b/docs/docs/integrations/providers/oci.mdx
@@ -2,27 +2,29 @@
The `LangChain` integrations related to [Oracle Cloud Infrastructure](https://www.oracle.com/artificial-intelligence/).
-## LLMs
-
-### OCI Generative AI
+## OCI Generative AI
> Oracle Cloud Infrastructure (OCI) [Generative AI](https://docs.oracle.com/en-us/iaas/Content/generative-ai/home.htm) is a fully managed service that provides a set of state-of-the-art,
> customizable large language models (LLMs) that cover a wide range of use cases, and which are available through a single API.
> Using the OCI Generative AI service you can access ready-to-use pretrained models, or create and host your own fine-tuned
> custom models based on your own data on dedicated AI clusters.
-To use, you should have the latest `oci` python SDK installed.
+To use, you should have the latest `oci` python SDK and the langchain_community package installed.
```bash
-pip install -U oci
+pip install -U oci langchain-community
```
-See [usage examples](/docs/integrations/llms/oci_generative_ai).
+See [chat](/docs/integrations/chat/oci_generative_ai), [complete](/docs/integrations/llms/oci_generative_ai), and [embedding](/docs/integrations/text_embedding/oci_generative_ai) usage examples.
```python
+from langchain_community.chat_models import ChatOCIGenAI
+
from langchain_community.llms import OCIGenAI
+
+from langchain_community.embeddings import OCIGenAIEmbeddings
```
-### OCI Data Science Model Deployment Endpoint
+## OCI Data Science Model Deployment Endpoint
> [OCI Data Science](https://docs.oracle.com/en-us/iaas/data-science/using/home.htm) is a
> fully managed and serverless platform for data science teams. Using the OCI Data Science
@@ -47,12 +49,3 @@ from langchain_community.llms import OCIModelDeploymentVLLM
from langchain_community.llms import OCIModelDeploymentTGI
```
-## Text Embedding Models
-
-### OCI Generative AI
-
-See [usage examples](/docs/integrations/text_embedding/oci_generative_ai).
-
-```python
-from langchain_community.embeddings import OCIGenAIEmbeddings
-```
\ No newline at end of file
diff --git a/docs/docs/integrations/text_embedding/ascend.ipynb b/docs/docs/integrations/text_embedding/ascend.ipynb
new file mode 100644
index 0000000000000..4d3559f837ce3
--- /dev/null
+++ b/docs/docs/integrations/text_embedding/ascend.ipynb
@@ -0,0 +1,183 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "a636f6f3-00d7-4248-8c36-3da51190e882",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[-0.04053403 -0.05560051 -0.04385472 ... 0.09371872 0.02846981\n",
+ " -0.00576814]\n"
+ ]
+ }
+ ],
+ "source": [
+ "from langchain_community.embeddings import AscendEmbeddings\n",
+ "\n",
+ "model = AscendEmbeddings(\n",
+ " model_path=\"/root/.cache/modelscope/hub/yangjhchs/acge_text_embedding\",\n",
+ " device_id=0,\n",
+    "    query_instruction=\"Represent this sentence for searching relevant passages: \",\n",
+ ")\n",
+ "emb = model.embed_query(\"hellow\")\n",
+ "print(emb)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "8d29ddaa-eef3-4a4e-93d8-0f1c13525fb4",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "We strongly recommend passing in an `attention_mask` since your input_ids may be padded. See https://huggingface.co/docs/transformers/troubleshooting#incorrect-output-when-padding-tokens-arent-masked.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[[-0.00348254 0.03098977 -0.00203087 ... 0.08492374 0.03970494\n",
+ " -0.03372753]\n",
+ " [-0.02198593 -0.01601127 0.00215684 ... 0.06065163 0.00126425\n",
+ " -0.03634358]]\n"
+ ]
+ }
+ ],
+ "source": [
+ "doc_embs = model.embed_documents(\n",
+ " [\"This is a content of the document\", \"This is another document\"]\n",
+ ")\n",
+ "print(doc_embs)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "797a720d-c478-4254-be2c-975bc4529f57",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "model.aembed_query(\"hellow\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "57e62e53-4d2c-4532-9b77-a46bc3da1130",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([-0.04053403, -0.05560051, -0.04385472, ..., 0.09371872,\n",
+ " 0.02846981, -0.00576814], dtype=float32)"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "await model.aembed_query(\"hellow\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "7e260457-8b50-4ca3-8f76-8a76d8bba8c8",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "model.aembed_documents(\n",
+ " [\"This is a content of the document\", \"This is another document\"]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "ce954b94-aaac-4d2c-80be-b2988c16af6d",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[-0.00348254, 0.03098977, -0.00203087, ..., 0.08492374,\n",
+ " 0.03970494, -0.03372753],\n",
+ " [-0.02198593, -0.01601127, 0.00215684, ..., 0.06065163,\n",
+ " 0.00126425, -0.03634358]], dtype=float32)"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "await model.aembed_documents(\n",
+ " [\"This is a content of the document\", \"This is another document\"]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7823d69d-de79-4f95-90dd-38f4bdeb9bcc",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/docs/tutorials/llm_chain.ipynb b/docs/docs/tutorials/llm_chain.ipynb
index aaa860250023a..c768c6534c787 100644
--- a/docs/docs/tutorials/llm_chain.ipynb
+++ b/docs/docs/tutorials/llm_chain.ipynb
@@ -231,7 +231,7 @@
"id": "d508b79d",
"metadata": {},
"source": [
- "More commonly, we can \"chain\" the model with this output parser. This means this output parser will get called everytime in this chain. This chain takes on the input type of the language model (string or list of message) and returns the output type of the output parser (string).\n",
+ "More commonly, we can \"chain\" the model with this output parser. This means this output parser will get called every time in this chain. This chain takes on the input type of the language model (string or list of message) and returns the output type of the output parser (string).\n",
"\n",
"We can easily create the chain using the `|` operator. The `|` operator is used in LangChain to combine two elements together."
]
diff --git a/docs/docs/tutorials/pdf_qa.ipynb b/docs/docs/tutorials/pdf_qa.ipynb
index 00b2b4c11c888..e8931ff24c439 100644
--- a/docs/docs/tutorials/pdf_qa.ipynb
+++ b/docs/docs/tutorials/pdf_qa.ipynb
@@ -122,7 +122,7 @@
"```{=mdx}\n",
"import ChatModelTabs from \"@theme/ChatModelTabs\";\n",
"\n",
- "\n",
+ "\n",
"```"
]
},
diff --git a/docs/vercel_requirements.txt b/docs/vercel_requirements.txt
index 2883c3eb5e87b..a03fec9f9f631 100644
--- a/docs/vercel_requirements.txt
+++ b/docs/vercel_requirements.txt
@@ -7,5 +7,5 @@ langchain-cohere
langchain-astradb
langchain-nvidia-ai-endpoints
langchain-elasticsearch
-urllib3==1.26.18
+urllib3==1.26.19
nbconvert==7.16.4
diff --git a/libs/community/extended_testing_deps.txt b/libs/community/extended_testing_deps.txt
index 9f5e8284af6b6..db8d9cdfd0b8d 100644
--- a/libs/community/extended_testing_deps.txt
+++ b/libs/community/extended_testing_deps.txt
@@ -46,7 +46,7 @@ mwxml>=0.3.3,<0.4
newspaper3k>=0.2.8,<0.3
numexpr>=2.8.6,<3
nvidia-riva-client>=2.14.0,<3
-oci>=2.119.1,<3
+oci>=2.128.0,<3
openai<2
openapi-pydantic>=0.3.2,<0.4
oracle-ads>=2.9.1,<3
diff --git a/libs/community/langchain_community/chains/pebblo_retrieval/base.py b/libs/community/langchain_community/chains/pebblo_retrieval/base.py
index 6097c5b29cb04..4fb769231484a 100644
--- a/libs/community/langchain_community/chains/pebblo_retrieval/base.py
+++ b/libs/community/langchain_community/chains/pebblo_retrieval/base.py
@@ -5,6 +5,7 @@
import datetime
import inspect
+import json
import logging
from http import HTTPStatus
from typing import Any, Dict, List, Optional
@@ -72,7 +73,9 @@ class PebbloRetrievalQA(Chain):
"""Pebblo cloud API key for app."""
classifier_url: str = CLASSIFIER_URL #: :meta private:
"""Classifier endpoint."""
- _discover_sent: bool = False #: :meta private:
+ classifier_location: str = "local" #: :meta private:
+ """Classifier location. It could be either of 'local' or 'pebblo-cloud'."""
+ _discover_sent = False #: :meta private:
"""Flag to check if discover payload has been sent."""
_prompt_sent: bool = False #: :meta private:
"""Flag to check if prompt payload has been sent."""
@@ -94,6 +97,7 @@ def _call(
answer, docs = res['result'], res['source_documents']
"""
prompt_time = datetime.datetime.now().isoformat()
+ PebbloRetrievalQA.set_prompt_sent(value=False)
_run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
question = inputs[self.input_key]
auth_context = inputs.get(self.auth_context_key, {})
@@ -115,7 +119,9 @@ def _call(
"name": self.app_name,
"context": [
{
- "retrieved_from": doc.metadata.get("source"),
+ "retrieved_from": doc.metadata.get(
+ "full_path", doc.metadata.get("source")
+ ),
"doc": doc.page_content,
"vector_db": self.retriever.vectorstore.__class__.__name__,
}
@@ -131,6 +137,7 @@ def _call(
"user_identities": auth_context.user_auth
if auth_context and hasattr(auth_context, "user_auth")
else [],
+ "classifier_location": self.classifier_location,
}
qa_payload = Qa(**qa)
self._send_prompt(qa_payload)
@@ -220,6 +227,7 @@ def from_chain_type(
chain_type_kwargs: Optional[dict] = None,
api_key: Optional[str] = None,
classifier_url: str = CLASSIFIER_URL,
+ classifier_location: str = "local",
**kwargs: Any,
) -> "PebbloRetrievalQA":
"""Load chain from chain type."""
@@ -231,7 +239,7 @@ def from_chain_type(
)
# generate app
- app = PebbloRetrievalQA._get_app_details(
+ app: App = PebbloRetrievalQA._get_app_details(
app_name=app_name,
description=description,
owner=owner,
@@ -240,7 +248,10 @@ def from_chain_type(
)
PebbloRetrievalQA._send_discover(
- app, api_key=api_key, classifier_url=classifier_url
+ app,
+ api_key=api_key,
+ classifier_url=classifier_url,
+ classifier_location=classifier_location,
)
return cls(
@@ -250,6 +261,7 @@ def from_chain_type(
description=description,
api_key=api_key,
classifier_url=classifier_url,
+ classifier_location=classifier_location,
**kwargs,
)
@@ -300,7 +312,9 @@ async def _aget_docs(
)
@staticmethod
- def _get_app_details(app_name, owner, description, llm, **kwargs) -> App: # type: ignore
+ def _get_app_details( # type: ignore
+ app_name: str, owner: str, description: str, llm: BaseLanguageModel, **kwargs
+ ) -> App:
"""Fetch app details. Internal method.
Returns:
App: App details.
@@ -319,38 +333,49 @@ def _get_app_details(app_name, owner, description, llm, **kwargs) -> App: # typ
return app
@staticmethod
- def _send_discover(app, api_key, classifier_url) -> None: # type: ignore
+ def _send_discover(
+ app: App,
+ api_key: Optional[str],
+ classifier_url: str,
+ classifier_location: str,
+ ) -> None: # type: ignore
"""Send app discovery payload to pebblo-server. Internal method."""
headers = {
"Accept": "application/json",
"Content-Type": "application/json",
}
payload = app.dict(exclude_unset=True)
- app_discover_url = f"{classifier_url}{APP_DISCOVER_URL}"
- try:
- pebblo_resp = requests.post(
- app_discover_url, headers=headers, json=payload, timeout=20
- )
- logger.debug("discover-payload: %s", payload)
- logger.debug(
- "send_discover[local]: request url %s, body %s len %s\
- response status %s body %s",
- pebblo_resp.request.url,
- str(pebblo_resp.request.body),
- str(len(pebblo_resp.request.body if pebblo_resp.request.body else [])),
- str(pebblo_resp.status_code),
- pebblo_resp.json(),
- )
- if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]:
- PebbloRetrievalQA.set_discover_sent()
- else:
- logger.warning(
- f"Received unexpected HTTP response code: {pebblo_resp.status_code}"
+ if classifier_location == "local":
+ app_discover_url = f"{classifier_url}{APP_DISCOVER_URL}"
+ try:
+ pebblo_resp = requests.post(
+ app_discover_url, headers=headers, json=payload, timeout=20
)
- except requests.exceptions.RequestException:
- logger.warning("Unable to reach pebblo server.")
- except Exception as e:
- logger.warning("An Exception caught in _send_discover: local %s", e)
+ logger.debug("discover-payload: %s", payload)
+ logger.debug(
+ "send_discover[local]: request url %s, body %s len %s\
+ response status %s body %s",
+ pebblo_resp.request.url,
+ str(pebblo_resp.request.body),
+ str(
+ len(
+ pebblo_resp.request.body if pebblo_resp.request.body else []
+ )
+ ),
+ str(pebblo_resp.status_code),
+ pebblo_resp.json(),
+ )
+ if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]:
+ PebbloRetrievalQA.set_discover_sent()
+ else:
+ logger.warning(
+ "Received unexpected HTTP response code:"
+ + f"{pebblo_resp.status_code}"
+ )
+ except requests.exceptions.RequestException:
+ logger.warning("Unable to reach pebblo server.")
+ except Exception as e:
+ logger.warning("An Exception caught in _send_discover: local %s", e)
if api_key:
try:
@@ -385,8 +410,8 @@ def set_discover_sent(cls) -> None:
cls._discover_sent = True
@classmethod
- def set_prompt_sent(cls) -> None:
- cls._prompt_sent = True
+ def set_prompt_sent(cls, value: bool = True) -> None:
+ cls._prompt_sent = value
def _send_prompt(self, qa_payload: Qa) -> None:
headers = {
@@ -394,39 +419,73 @@ def _send_prompt(self, qa_payload: Qa) -> None:
"Content-Type": "application/json",
}
app_discover_url = f"{self.classifier_url}{PROMPT_URL}"
- try:
- pebblo_resp = requests.post(
- app_discover_url, headers=headers, json=qa_payload.dict(), timeout=20
- )
- logger.debug("prompt-payload: %s", qa_payload)
- logger.debug(
- "send_prompt[local]: request url %s, body %s len %s\
- response status %s body %s",
- pebblo_resp.request.url,
- str(pebblo_resp.request.body),
- str(len(pebblo_resp.request.body if pebblo_resp.request.body else [])),
- str(pebblo_resp.status_code),
- pebblo_resp.json(),
- )
- if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]:
- PebbloRetrievalQA.set_prompt_sent()
- else:
- logger.warning(
- f"Received unexpected HTTP response code: {pebblo_resp.status_code}"
+ pebblo_resp = None
+ payload = qa_payload.dict(exclude_unset=True)
+ if self.classifier_location == "local":
+ try:
+ pebblo_resp = requests.post(
+ app_discover_url,
+ headers=headers,
+ json=payload,
+ timeout=20,
)
- except requests.exceptions.RequestException:
- logger.warning("Unable to reach pebblo server.")
- except Exception as e:
- logger.warning("An Exception caught in _send_discover: local %s", e)
+ logger.debug("prompt-payload: %s", payload)
+ logger.debug(
+ "send_prompt[local]: request url %s, body %s len %s\
+ response status %s body %s",
+ pebblo_resp.request.url,
+ str(pebblo_resp.request.body),
+ str(
+ len(
+ pebblo_resp.request.body if pebblo_resp.request.body else []
+ )
+ ),
+ str(pebblo_resp.status_code),
+ pebblo_resp.json(),
+ )
+ if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]:
+ PebbloRetrievalQA.set_prompt_sent()
+ else:
+ logger.warning(
+ "Received unexpected HTTP response code:"
+ + f"{pebblo_resp.status_code}"
+ )
+ except requests.exceptions.RequestException:
+ logger.warning("Unable to reach pebblo server.")
+ except Exception as e:
+ logger.warning("An Exception caught in _send_discover: local %s", e)
+ # If classifier location is local, then response, context and prompt
+ # should be fetched from pebblo_resp and replaced in payload.
if self.api_key:
+ if self.classifier_location == "local":
+ if pebblo_resp:
+ payload["response"] = (
+ json.loads(pebblo_resp.text)
+ .get("retrieval_data", {})
+ .get("response", {})
+ )
+ payload["context"] = (
+ json.loads(pebblo_resp.text)
+ .get("retrieval_data", {})
+ .get("context", [])
+ )
+ payload["prompt"] = (
+ json.loads(pebblo_resp.text)
+ .get("retrieval_data", {})
+ .get("prompt", {})
+ )
+ else:
+ payload["response"] = None
+ payload["context"] = None
+ payload["prompt"] = None
+ headers.update({"x-api-key": self.api_key})
+ pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{PROMPT_URL}"
try:
- headers.update({"x-api-key": self.api_key})
- pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{PROMPT_URL}"
pebblo_cloud_response = requests.post(
pebblo_cloud_url,
headers=headers,
- json=qa_payload.dict(),
+ json=payload,
timeout=20,
)
@@ -449,9 +508,12 @@ def _send_prompt(self, qa_payload: Qa) -> None:
logger.warning("Unable to reach Pebblo cloud server.")
except Exception as e:
logger.warning("An Exception caught in _send_prompt: cloud %s", e)
+ elif self.classifier_location == "pebblo-cloud":
+ logger.warning("API key is missing for sending prompt to Pebblo cloud.")
+ raise NameError("API key is missing for sending prompt to Pebblo cloud.")
@classmethod
- def get_chain_details(cls, llm, **kwargs): # type: ignore
+ def get_chain_details(cls, llm: BaseLanguageModel, **kwargs): # type: ignore
llm_dict = llm.__dict__
chain = [
{
@@ -474,6 +536,6 @@ def get_chain_details(cls, llm, **kwargs): # type: ignore
),
}
],
- }
+ },
]
return chain
diff --git a/libs/community/langchain_community/chains/pebblo_retrieval/models.py b/libs/community/langchain_community/chains/pebblo_retrieval/models.py
index 3dc344dd38c72..3b7f94d44c8a4 100644
--- a/libs/community/langchain_community/chains/pebblo_retrieval/models.py
+++ b/libs/community/langchain_community/chains/pebblo_retrieval/models.py
@@ -1,6 +1,6 @@
"""Models for the PebbloRetrievalQA chain."""
-from typing import Any, List, Optional
+from typing import Any, List, Optional, Union
from langchain_core.pydantic_v1 import BaseModel
@@ -137,9 +137,10 @@ class Prompt(BaseModel):
class Qa(BaseModel):
name: str
- context: List[Optional[Context]]
- prompt: Prompt
- response: Prompt
+ context: Union[List[Optional[Context]], Optional[Context]]
+ prompt: Optional[Prompt]
+ response: Optional[Prompt]
prompt_time: str
user: str
user_identities: Optional[List[str]]
+ classifier_location: str
diff --git a/libs/community/langchain_community/chat_models/__init__.py b/libs/community/langchain_community/chat_models/__init__.py
index 7b942a26caca0..af25b60184d78 100644
--- a/libs/community/langchain_community/chat_models/__init__.py
+++ b/libs/community/langchain_community/chat_models/__init__.py
@@ -121,6 +121,9 @@
from langchain_community.chat_models.mlx import (
ChatMLX,
)
+ from langchain_community.chat_models.oci_generative_ai import (
+ ChatOCIGenAI, # noqa: F401
+ )
from langchain_community.chat_models.octoai import ChatOctoAI
from langchain_community.chat_models.ollama import (
ChatOllama,
@@ -194,6 +197,7 @@
"ChatMLflowAIGateway",
"ChatMaritalk",
"ChatMlflow",
+ "ChatOCIGenAI",
"ChatOllama",
"ChatOpenAI",
"ChatPerplexity",
@@ -248,6 +252,7 @@
"ChatMaritalk": "langchain_community.chat_models.maritalk",
"ChatMlflow": "langchain_community.chat_models.mlflow",
"ChatOctoAI": "langchain_community.chat_models.octoai",
+ "ChatOCIGenAI": "langchain_community.chat_models.oci_generative_ai",
"ChatOllama": "langchain_community.chat_models.ollama",
"ChatOpenAI": "langchain_community.chat_models.openai",
"ChatPerplexity": "langchain_community.chat_models.perplexity",
diff --git a/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py b/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py
index 019acffd15c65..3b75ab2524aee 100644
--- a/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py
+++ b/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py
@@ -109,22 +109,195 @@ def _convert_dict_to_message(_dict: Mapping[str, Any]) -> AIMessage:
class QianfanChatEndpoint(BaseChatModel):
- """Baidu Qianfan chat models.
+ """Baidu Qianfan chat model integration.
+
+ Setup:
+ Install ``qianfan`` and set environment variables ``QIANFAN_AK``, ``QIANFAN_SK``.
+
+ .. code-block:: bash
+
+ pip install qianfan
+ export QIANFAN_AK="your-api-key"
+            export QIANFAN_SK="your-secret-key"
+
+ Key init args — completion params:
+ model: str
+ Name of Qianfan model to use.
+ temperature: Optional[float]
+ Sampling temperature.
+ endpoint: Optional[str]
+ Endpoint of the Qianfan LLM
+ top_p: Optional[float]
+ What probability mass to use.
+
+ Key init args — client params:
+ timeout: Optional[int]
+ Timeout for requests.
+ api_key: Optional[str]
+ Qianfan API KEY. If not passed in will be read from env var QIANFAN_AK.
+ secret_key: Optional[str]
+ Qianfan SECRET KEY. If not passed in will be read from env var QIANFAN_SK.
+
+ See full list of supported init args and their descriptions in the params section.
+
+ Instantiate:
+ .. code-block:: python
- To use, you should have the ``qianfan`` python package installed, and
- the environment variable ``qianfan_ak`` and ``qianfan_sk`` set with your
- API key and Secret Key.
+ from langchain_community.chat_models import QianfanChatEndpoint
- ak, sk are required parameters
- which you could get from https://cloud.baidu.com/product/wenxinworkshop
+ qianfan_chat = QianfanChatEndpoint(
+ model="ERNIE-3.5-8K",
+ temperature=0.2,
+ timeout=30,
+ # api_key="...",
+ # secret_key="...",
+ # top_p="...",
+ # other params...
+ )
+
+ Invoke:
+ .. code-block:: python
+
+ messages = [
+ ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
+ ("human", "我喜欢编程。"),
+ ]
+            qianfan_chat.invoke(messages)
- Example:
.. code-block:: python
- from langchain_community.chat_models import QianfanChatEndpoint
- qianfan_chat = QianfanChatEndpoint(model="ERNIE-Bot",
- endpoint="your_endpoint", qianfan_ak="your_ak", qianfan_sk="your_sk")
- """
+ AIMessage(content='I enjoy programming.', additional_kwargs={'finish_reason': 'normal', 'request_id': 'as-7848zeqn1c', 'object': 'chat.completion', 'search_info': []}, response_metadata={'token_usage': {'prompt_tokens': 16, 'completion_tokens': 4, 'total_tokens': 20}, 'model_name': 'ERNIE-3.5-8K', 'finish_reason': 'normal', 'id': 'as-7848zeqn1c', 'object': 'chat.completion', 'created': 1719153606, 'result': 'I enjoy programming.', 'is_truncated': False, 'need_clear_history': False, 'usage': {'prompt_tokens': 16, 'completion_tokens': 4, 'total_tokens': 20}}, id='run-4bca0c10-5043-456b-a5be-2f62a980f3f0-0')
+
+ Stream:
+ .. code-block:: python
+
+ for chunk in qianfan_chat.stream(messages):
+ print(chunk)
+
+ .. code-block:: python
+
+ content='I enjoy' response_metadata={'finish_reason': 'normal', 'request_id': 'as-yz0yz1w1rq', 'object': 'chat.completion', 'search_info': []} id='run-0fa9da50-003e-4a26-ba16-dbfe96249b8b' role='assistant'
+ content=' programming.' response_metadata={'finish_reason': 'normal', 'request_id': 'as-yz0yz1w1rq', 'object': 'chat.completion', 'search_info': []} id='run-0fa9da50-003e-4a26-ba16-dbfe96249b8b' role='assistant'
+
+ .. code-block:: python
+
+            full = next(stream := qianfan_chat.stream(messages))
+ for chunk in stream:
+ full += chunk
+ full
+
+ .. code-block::
+
+ AIMessageChunk(content='I enjoy programming.', response_metadata={'finish_reason': 'normalnormal', 'request_id': 'as-p63cnn3ppnas-p63cnn3ppn', 'object': 'chat.completionchat.completion', 'search_info': []}, id='run-09a8cbbd-5ded-4529-981d-5bc9d1206404')
+
+ Async:
+ .. code-block:: python
+
+ await qianfan_chat.ainvoke(messages)
+
+ # stream:
+ # async for chunk in qianfan_chat.astream(messages):
+ # print(chunk)
+
+ # batch:
+ # await qianfan_chat.abatch([messages])
+
+ .. code-block:: python
+
+ [AIMessage(content='I enjoy programming.', additional_kwargs={'finish_reason': 'normal', 'request_id': 'as-mpqa8qa1qb', 'object': 'chat.completion', 'search_info': []}, response_metadata={'token_usage': {'prompt_tokens': 16, 'completion_tokens': 4, 'total_tokens': 20}, 'model_name': 'ERNIE-3.5-8K', 'finish_reason': 'normal', 'id': 'as-mpqa8qa1qb', 'object': 'chat.completion', 'created': 1719155120, 'result': 'I enjoy programming.', 'is_truncated': False, 'need_clear_history': False, 'usage': {'prompt_tokens': 16, 'completion_tokens': 4, 'total_tokens': 20}}, id='run-443b2231-08f9-4725-b807-b77d0507ad44-0')]
+
+ Tool calling:
+ .. code-block:: python
+
+ from langchain_core.pydantic_v1 import BaseModel, Field
+
+
+ class GetWeather(BaseModel):
+ '''Get the current weather in a given location'''
+
+ location: str = Field(
+ ..., description="The city and state, e.g. San Francisco, CA"
+ )
+
+
+ class GetPopulation(BaseModel):
+ '''Get the current population in a given location'''
+
+ location: str = Field(
+ ..., description="The city and state, e.g. San Francisco, CA"
+ )
+
+ chat_with_tools = qianfan_chat.bind_tools([GetWeather, GetPopulation])
+ ai_msg = chat_with_tools.invoke(
+ "Which city is hotter today and which is bigger: LA or NY?"
+ )
+ ai_msg.tool_calls
+
+ .. code-block:: python
+
+ [
+ {
+ 'name': 'GetWeather',
+ 'args': {'location': 'Los Angeles, CA'},
+ 'id': '533e5f63-a3dc-40f2-9d9c-22b1feee62e0'
+ }
+ ]
+
+ Structured output:
+ .. code-block:: python
+
+ from typing import Optional
+
+ from langchain_core.pydantic_v1 import BaseModel, Field
+
+
+ class Joke(BaseModel):
+ '''Joke to tell user.'''
+
+ setup: str = Field(description="The setup of the joke")
+ punchline: str = Field(description="The punchline to the joke")
+ rating: Optional[int] = Field(description="How funny the joke is, from 1 to 10")
+
+
+ structured_chat = qianfan_chat.with_structured_output(Joke)
+ structured_chat.invoke("Tell me a joke about cats")
+
+ .. code-block:: python
+
+ Joke(
+ setup='A cat is sitting in front of a mirror and sees another cat. What does the cat think?',
+ punchline="The cat doesn't think it's another cat, it thinks it's another mirror.",
+ rating=None
+ )
+
+    Response metadata:
+ .. code-block:: python
+
+ ai_msg = qianfan_chat.invoke(messages)
+ ai_msg.response_metadata
+
+ .. code-block:: python
+ {
+ 'token_usage': {
+ 'prompt_tokens': 16,
+ 'completion_tokens': 4,
+ 'total_tokens': 20},
+ 'model_name': 'ERNIE-3.5-8K',
+ 'finish_reason': 'normal',
+ 'id': 'as-qbzwtydqmi',
+ 'object': 'chat.completion',
+ 'created': 1719158153,
+ 'result': 'I enjoy programming.',
+ 'is_truncated': False,
+ 'need_clear_history': False,
+ 'usage': {
+ 'prompt_tokens': 16,
+ 'completion_tokens': 4,
+ 'total_tokens': 20
+ }
+ }
+
+ """ # noqa: E501
init_kwargs: Dict[str, Any] = Field(default_factory=dict)
"""init kwargs for qianfan client init, such as `query_per_second` which is
diff --git a/libs/community/langchain_community/chat_models/oci_generative_ai.py b/libs/community/langchain_community/chat_models/oci_generative_ai.py
new file mode 100644
index 0000000000000..9409b1a2fb743
--- /dev/null
+++ b/libs/community/langchain_community/chat_models/oci_generative_ai.py
@@ -0,0 +1,363 @@
+import json
+from abc import ABC, abstractmethod
+from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence
+
+from langchain_core.callbacks import CallbackManagerForLLMRun
+from langchain_core.language_models.chat_models import (
+ BaseChatModel,
+ generate_from_stream,
+)
+from langchain_core.messages import (
+ AIMessage,
+ AIMessageChunk,
+ BaseMessage,
+ ChatMessage,
+ HumanMessage,
+ SystemMessage,
+)
+from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
+from langchain_core.pydantic_v1 import Extra
+
+from langchain_community.llms.oci_generative_ai import OCIGenAIBase
+from langchain_community.llms.utils import enforce_stop_tokens
+
+CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"
+
+
+class Provider(ABC):
+ @property
+ @abstractmethod
+ def stop_sequence_key(self) -> str:
+ ...
+
+ @abstractmethod
+ def chat_response_to_text(self, response: Any) -> str:
+ ...
+
+ @abstractmethod
+ def chat_stream_to_text(self, event_data: Dict) -> str:
+ ...
+
+ @abstractmethod
+ def chat_generation_info(self, response: Any) -> Dict[str, Any]:
+ ...
+
+ @abstractmethod
+ def get_role(self, message: BaseMessage) -> str:
+ ...
+
+ @abstractmethod
+ def messages_to_oci_params(self, messages: Any) -> Dict[str, Any]:
+ ...
+
+
+class CohereProvider(Provider):
+ stop_sequence_key = "stop_sequences"
+
+ def __init__(self) -> None:
+ from oci.generative_ai_inference import models
+
+ self.oci_chat_request = models.CohereChatRequest
+ self.oci_chat_message = {
+ "USER": models.CohereUserMessage,
+ "CHATBOT": models.CohereChatBotMessage,
+ "SYSTEM": models.CohereSystemMessage,
+ }
+ self.chat_api_format = models.BaseChatRequest.API_FORMAT_COHERE
+
+ def chat_response_to_text(self, response: Any) -> str:
+ return response.data.chat_response.text
+
+ def chat_stream_to_text(self, event_data: Dict) -> str:
+ if "text" in event_data and "finishReason" not in event_data:
+ return event_data["text"]
+ else:
+ return ""
+
+ def chat_generation_info(self, response: Any) -> Dict[str, Any]:
+ return {
+ "finish_reason": response.data.chat_response.finish_reason,
+ }
+
+ def get_role(self, message: BaseMessage) -> str:
+ if isinstance(message, HumanMessage):
+ return "USER"
+ elif isinstance(message, AIMessage):
+ return "CHATBOT"
+ elif isinstance(message, SystemMessage):
+ return "SYSTEM"
+ else:
+ raise ValueError(f"Got unknown type {message}")
+
+ def messages_to_oci_params(self, messages: Sequence[ChatMessage]) -> Dict[str, Any]:
+ oci_chat_history = [
+ self.oci_chat_message[self.get_role(msg)](message=msg.content)
+ for msg in messages[:-1]
+ ]
+ oci_params = {
+ "message": messages[-1].content,
+ "chat_history": oci_chat_history,
+ "api_format": self.chat_api_format,
+ }
+
+ return oci_params
+
+
+class MetaProvider(Provider):
+ stop_sequence_key = "stop"
+
+ def __init__(self) -> None:
+ from oci.generative_ai_inference import models
+
+ self.oci_chat_request = models.GenericChatRequest
+ self.oci_chat_message = {
+ "USER": models.UserMessage,
+ "SYSTEM": models.SystemMessage,
+ "ASSISTANT": models.AssistantMessage,
+ }
+ self.oci_chat_message_content = models.TextContent
+ self.chat_api_format = models.BaseChatRequest.API_FORMAT_GENERIC
+
+ def chat_response_to_text(self, response: Any) -> str:
+ return response.data.chat_response.choices[0].message.content[0].text
+
+ def chat_stream_to_text(self, event_data: Dict) -> str:
+ if "message" in event_data:
+ return event_data["message"]["content"][0]["text"]
+ else:
+ return ""
+
+ def chat_generation_info(self, response: Any) -> Dict[str, Any]:
+ return {
+ "finish_reason": response.data.chat_response.choices[0].finish_reason,
+ "time_created": str(response.data.chat_response.time_created),
+ }
+
+ def get_role(self, message: BaseMessage) -> str:
+ # meta only supports alternating user/assistant roles
+ if isinstance(message, HumanMessage):
+ return "USER"
+ elif isinstance(message, AIMessage):
+ return "ASSISTANT"
+ elif isinstance(message, SystemMessage):
+ return "SYSTEM"
+ else:
+ raise ValueError(f"Got unknown type {message}")
+
+ def messages_to_oci_params(self, messages: List[BaseMessage]) -> Dict[str, Any]:
+ oci_messages = [
+ self.oci_chat_message[self.get_role(msg)](
+ content=[self.oci_chat_message_content(text=msg.content)]
+ )
+ for msg in messages
+ ]
+ oci_params = {
+ "messages": oci_messages,
+ "api_format": self.chat_api_format,
+ "top_k": -1,
+ }
+
+ return oci_params
+
+
+class ChatOCIGenAI(BaseChatModel, OCIGenAIBase):
+ """ChatOCIGenAI chat model integration.
+
+ Setup:
+ Install ``langchain-community`` and the ``oci`` sdk.
+
+ .. code-block:: bash
+
+ pip install -U langchain-community oci
+
+ Key init args — completion params:
+ model_id: str
+ Id of the OCIGenAI chat model to use, e.g., cohere.command-r-16k.
+ is_stream: bool
+ Whether to stream back partial progress
+ model_kwargs: Optional[Dict]
+ Keyword arguments to pass to the specific model used, e.g., temperature, max_tokens.
+
+ Key init args — client params:
+ service_endpoint: str
+ The endpoint URL for the OCIGenAI service, e.g., https://inference.generativeai.us-chicago-1.oci.oraclecloud.com.
+ compartment_id: str
+ The compartment OCID.
+ auth_type: str
+ The authentication type to use, e.g., API_KEY (default), SECURITY_TOKEN, INSTANCE_PRINCIPAL, RESOURCE_PRINCIPAL.
+ auth_profile: Optional[str]
+            The name of the profile in ~/.oci/config, if not specified, DEFAULT will be used.
+ provider: str
+ Provider name of the model. Default to None, will try to be derived from the model_id otherwise, requires user input.
+ See full list of supported init args and their descriptions in the params section.
+
+ Instantiate:
+ .. code-block:: python
+
+ from langchain_community.chat_models import ChatOCIGenAI
+
+ chat = ChatOCIGenAI(
+ model_id="cohere.command-r-16k",
+ service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
+ compartment_id="MY_OCID",
+ model_kwargs={"temperature": 0.7, "max_tokens": 500},
+ )
+
+ Invoke:
+ .. code-block:: python
+ messages = [
+ SystemMessage(content="your are an AI assistant."),
+ AIMessage(content="Hi there human!"),
+ HumanMessage(content="tell me a joke."),
+ ]
+ response = chat.invoke(messages)
+
+ Stream:
+ .. code-block:: python
+
+ for r in chat.stream(messages):
+ print(r.content, end="", flush=True)
+
+    Response metadata:
+ .. code-block:: python
+
+ response = chat.invoke(messages)
+ print(response.response_metadata)
+
+ """ # noqa: E501
+
+ class Config:
+ """Configuration for this pydantic object."""
+
+ extra = Extra.forbid
+
+ @property
+ def _llm_type(self) -> str:
+ """Return type of llm."""
+ return "oci_generative_ai_chat"
+
+ @property
+ def _provider_map(self) -> Mapping[str, Any]:
+ """Get the provider map"""
+ return {
+ "cohere": CohereProvider(),
+ "meta": MetaProvider(),
+ }
+
+ @property
+ def _provider(self) -> Any:
+ """Get the internal provider object"""
+ return self._get_provider(provider_map=self._provider_map)
+
+ def _prepare_request(
+ self,
+ messages: List[BaseMessage],
+ stop: Optional[List[str]],
+ kwargs: Dict[str, Any],
+ stream: bool,
+ ) -> Dict[str, Any]:
+ try:
+ from oci.generative_ai_inference import models
+
+ except ImportError as ex:
+ raise ModuleNotFoundError(
+ "Could not import oci python package. "
+ "Please make sure you have the oci package installed."
+ ) from ex
+ oci_params = self._provider.messages_to_oci_params(messages)
+ oci_params["is_stream"] = stream # self.is_stream
+ _model_kwargs = self.model_kwargs or {}
+
+ if stop is not None:
+ _model_kwargs[self._provider.stop_sequence_key] = stop
+
+ chat_params = {**_model_kwargs, **kwargs, **oci_params}
+
+ if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
+ serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
+ else:
+ serving_mode = models.OnDemandServingMode(model_id=self.model_id)
+
+ request = models.ChatDetails(
+ compartment_id=self.compartment_id,
+ serving_mode=serving_mode,
+ chat_request=self._provider.oci_chat_request(**chat_params),
+ )
+
+ return request
+
+ def _generate(
+ self,
+ messages: List[BaseMessage],
+ stop: Optional[List[str]] = None,
+ run_manager: Optional[CallbackManagerForLLMRun] = None,
+ **kwargs: Any,
+ ) -> ChatResult:
+ """Call out to a OCIGenAI chat model.
+
+ Args:
+ messages: list of LangChain messages
+ stop: Optional list of stop words to use.
+
+ Returns:
+ LangChain ChatResult
+
+ Example:
+ .. code-block:: python
+
+ messages = [
+ HumanMessage(content="hello!"),
+ AIMessage(content="Hi there human!"),
+ HumanMessage(content="Meow!")
+ ]
+
+ response = llm.invoke(messages)
+ """
+ if self.is_stream:
+ stream_iter = self._stream(
+ messages, stop=stop, run_manager=run_manager, **kwargs
+ )
+ return generate_from_stream(stream_iter)
+
+ request = self._prepare_request(messages, stop, kwargs, stream=False)
+ response = self.client.chat(request)
+
+ content = self._provider.chat_response_to_text(response)
+
+ if stop is not None:
+ content = enforce_stop_tokens(content, stop)
+
+ generation_info = self._provider.chat_generation_info(response)
+
+ llm_output = {
+ "model_id": response.data.model_id,
+ "model_version": response.data.model_version,
+ "request_id": response.request_id,
+ "content-length": response.headers["content-length"],
+ }
+
+ return ChatResult(
+ generations=[
+ ChatGeneration(
+ message=AIMessage(content=content), generation_info=generation_info
+ )
+ ],
+ llm_output=llm_output,
+ )
+
+ def _stream(
+ self,
+ messages: List[BaseMessage],
+ stop: Optional[List[str]] = None,
+ run_manager: Optional[CallbackManagerForLLMRun] = None,
+ **kwargs: Any,
+ ) -> Iterator[ChatGenerationChunk]:
+ request = self._prepare_request(messages, stop, kwargs, stream=True)
+ response = self.client.chat(request)
+
+ for event in response.data.events():
+ delta = self._provider.chat_stream_to_text(json.loads(event.data))
+ chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
+ if run_manager:
+ run_manager.on_llm_new_token(delta, chunk=chunk)
+ yield chunk
diff --git a/libs/community/langchain_community/document_compressors/flashrank_rerank.py b/libs/community/langchain_community/document_compressors/flashrank_rerank.py
index dd3307b43e668..fd66bee659d2b 100644
--- a/libs/community/langchain_community/document_compressors/flashrank_rerank.py
+++ b/libs/community/langchain_community/document_compressors/flashrank_rerank.py
@@ -38,17 +38,20 @@ class Config:
@root_validator(pre=True)
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
- try:
- from flashrank import Ranker
- except ImportError:
- raise ImportError(
- "Could not import flashrank python package. "
- "Please install it with `pip install flashrank`."
- )
+ if "client" in values:
+ return values
+ else:
+ try:
+ from flashrank import Ranker
+ except ImportError:
+ raise ImportError(
+ "Could not import flashrank python package. "
+ "Please install it with `pip install flashrank`."
+ )
- values["model"] = values.get("model", DEFAULT_MODEL_NAME)
- values["client"] = Ranker(model_name=values["model"])
- return values
+ values["model"] = values.get("model", DEFAULT_MODEL_NAME)
+ values["client"] = Ranker(model_name=values["model"])
+ return values
def compress_documents(
self,
diff --git a/libs/community/langchain_community/document_loaders/mongodb.py b/libs/community/langchain_community/document_loaders/mongodb.py
index 57b4f35217c80..b1e062226546f 100644
--- a/libs/community/langchain_community/document_loaders/mongodb.py
+++ b/libs/community/langchain_community/document_loaders/mongodb.py
@@ -77,7 +77,19 @@ async def aload(self) -> List[Document]:
# Extract text content from filtered fields or use the entire document
if self.field_names is not None:
- fields = {name: doc[name] for name in self.field_names}
+ fields = {}
+ for name in self.field_names:
+ # Split the field names to handle nested fields
+ keys = name.split(".")
+ value = doc
+ for key in keys:
+ if key in value:
+ value = value[key]
+ else:
+ value = ""
+ break
+ fields[name] = value
+
texts = [str(value) for value in fields.values()]
text = " ".join(texts)
else:
diff --git a/libs/community/langchain_community/document_loaders/pebblo.py b/libs/community/langchain_community/document_loaders/pebblo.py
index ed203b2c88d75..48c8a231fdb61 100644
--- a/libs/community/langchain_community/document_loaders/pebblo.py
+++ b/libs/community/langchain_community/document_loaders/pebblo.py
@@ -46,6 +46,8 @@ def __init__(
api_key: Optional[str] = None,
load_semantic: bool = False,
classifier_url: Optional[str] = None,
+ *,
+ classifier_location: str = "local",
):
if not name or not isinstance(name, str):
raise NameError("Must specify a valid name.")
@@ -65,6 +67,7 @@ def __init__(
self.source_path_size = self.get_source_size(self.source_path)
self.source_aggregate_size = 0
self.classifier_url = classifier_url or CLASSIFIER_URL
+ self.classifier_location = classifier_location
self.loader_details = {
"loader": loader_name,
"source_path": self.source_path,
@@ -158,6 +161,7 @@ def _classify_doc(self, loaded_docs: list, loading_end: bool = False) -> list:
PebbloSafeLoader.set_loader_sent()
doc_content = [doc.dict() for doc in loaded_docs]
docs = []
+ classified_docs = []
for doc in doc_content:
doc_metadata = doc.get("metadata", {})
doc_authorized_identities = doc_metadata.get("authorized_identities", [])
@@ -204,6 +208,7 @@ def _classify_doc(self, loaded_docs: list, loading_end: bool = False) -> list:
"loader_details": self.loader_details,
"loading_end": "false",
"source_owner": self.source_owner,
+ "classifier_location": self.classifier_location,
}
if loading_end is True:
payload["loading_end"] = "true"
@@ -212,39 +217,46 @@ def _classify_doc(self, loaded_docs: list, loading_end: bool = False) -> list:
"source_aggregate_size"
] = self.source_aggregate_size
payload = Doc(**payload).dict(exclude_unset=True)
- load_doc_url = f"{self.classifier_url}{LOADER_DOC_URL}"
- classified_docs = []
- try:
- pebblo_resp = requests.post(
- load_doc_url, headers=headers, json=payload, timeout=300
- )
- classified_docs = json.loads(pebblo_resp.text).get("docs", None)
- if pebblo_resp.status_code not in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]:
- logger.warning(
- "Received unexpected HTTP response code: %s",
- pebblo_resp.status_code,
+ # Raw payload to be sent to classifier
+ if self.classifier_location == "local":
+ load_doc_url = f"{self.classifier_url}{LOADER_DOC_URL}"
+ try:
+ pebblo_resp = requests.post(
+ load_doc_url, headers=headers, json=payload, timeout=300
)
- logger.debug(
- "send_loader_doc[local]: request url %s, body %s len %s\
- response status %s body %s",
- pebblo_resp.request.url,
- str(pebblo_resp.request.body),
- str(len(pebblo_resp.request.body if pebblo_resp.request.body else [])),
- str(pebblo_resp.status_code),
- pebblo_resp.json(),
- )
- except requests.exceptions.RequestException:
- logger.warning("Unable to reach pebblo server.")
- except Exception as e:
- logger.warning("An Exception caught in _send_loader_doc: local %s", e)
+ classified_docs = json.loads(pebblo_resp.text).get("docs", None)
+ if pebblo_resp.status_code not in [
+ HTTPStatus.OK,
+ HTTPStatus.BAD_GATEWAY,
+ ]:
+ logger.warning(
+ "Received unexpected HTTP response code: %s",
+ pebblo_resp.status_code,
+ )
+ logger.debug(
+ "send_loader_doc[local]: request url %s, body %s len %s\
+ response status %s body %s",
+ pebblo_resp.request.url,
+ str(pebblo_resp.request.body),
+ str(
+ len(
+ pebblo_resp.request.body if pebblo_resp.request.body else []
+ )
+ ),
+ str(pebblo_resp.status_code),
+ pebblo_resp.json(),
+ )
+ except requests.exceptions.RequestException:
+ logger.warning("Unable to reach pebblo server.")
+ except Exception as e:
+ logger.warning("An Exception caught in _send_loader_doc: local %s", e)
+
if self.api_key:
- if not classified_docs:
- return classified_docs
- try:
+ if self.classifier_location == "local":
payload["docs"] = classified_docs
- payload["classified"] = True
- headers.update({"x-api-key": self.api_key})
- pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{LOADER_DOC_URL}"
+ headers.update({"x-api-key": self.api_key})
+ pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{LOADER_DOC_URL}"
+ try:
pebblo_cloud_response = requests.post(
pebblo_cloud_url, headers=headers, json=payload, timeout=20
)
@@ -267,9 +279,10 @@ def _classify_doc(self, loaded_docs: list, loading_end: bool = False) -> list:
logger.warning("Unable to reach Pebblo cloud server.")
except Exception as e:
logger.warning("An Exception caught in _send_loader_doc: cloud %s", e)
+ elif self.classifier_location == "pebblo-cloud":
+ logger.warning("API key is missing for sending docs to Pebblo cloud.")
+ raise NameError("API key is missing for sending docs to Pebblo cloud.")
- if loading_end is True:
- PebbloSafeLoader.set_loader_sent()
return classified_docs
@staticmethod
@@ -298,45 +311,50 @@ def _send_discover(self) -> None:
"Content-Type": "application/json",
}
payload = self.app.dict(exclude_unset=True)
- app_discover_url = f"{self.classifier_url}{APP_DISCOVER_URL}"
- try:
- pebblo_resp = requests.post(
- app_discover_url, headers=headers, json=payload, timeout=20
- )
- logger.debug(
- "send_discover[local]: request url %s, body %s len %s\
- response status %s body %s",
- pebblo_resp.request.url,
- str(pebblo_resp.request.body),
- str(len(pebblo_resp.request.body if pebblo_resp.request.body else [])),
- str(pebblo_resp.status_code),
- pebblo_resp.json(),
- )
- if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]:
- PebbloSafeLoader.set_discover_sent()
- else:
- logger.warning(
- f"Received unexpected HTTP response code: {pebblo_resp.status_code}"
+ # Raw discover payload to be sent to classifier
+ if self.classifier_location == "local":
+ app_discover_url = f"{self.classifier_url}{APP_DISCOVER_URL}"
+ try:
+ pebblo_resp = requests.post(
+ app_discover_url, headers=headers, json=payload, timeout=20
)
- except requests.exceptions.RequestException:
- logger.warning("Unable to reach pebblo server.")
- except Exception as e:
- logger.warning("An Exception caught in _send_discover: local %s", e)
+ logger.debug(
+ "send_discover[local]: request url %s, body %s len %s\
+ response status %s body %s",
+ pebblo_resp.request.url,
+ str(pebblo_resp.request.body),
+ str(
+ len(
+ pebblo_resp.request.body if pebblo_resp.request.body else []
+ )
+ ),
+ str(pebblo_resp.status_code),
+ pebblo_resp.json(),
+ )
+ if pebblo_resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]:
+ PebbloSafeLoader.set_discover_sent()
+ else:
+ logger.warning(
+ f"Received unexpected HTTP response code:\
+ {pebblo_resp.status_code}"
+ )
+ except requests.exceptions.RequestException:
+ logger.warning("Unable to reach pebblo server.")
+ except Exception as e:
+ logger.warning("An Exception caught in _send_discover: local %s", e)
if self.api_key:
try:
headers.update({"x-api-key": self.api_key})
+ # If the pebblo_resp is None,
+ # then the pebblo server version is not available
if pebblo_resp:
pebblo_server_version = json.loads(pebblo_resp.text).get(
"pebblo_server_version"
)
- payload.update(
- {
- "pebblo_server_version": pebblo_server_version,
- "pebblo_client_version": payload["plugin_version"],
- }
- )
- payload.pop("plugin_version")
+ payload.update({"pebblo_server_version": pebblo_server_version})
+
+ payload.update({"pebblo_client_version": PLUGIN_VERSION})
pebblo_cloud_url = f"{PEBBLO_CLOUD_URL}{APP_DISCOVER_URL}"
pebblo_cloud_response = requests.post(
pebblo_cloud_url, headers=headers, json=payload, timeout=20
diff --git a/libs/community/langchain_community/embeddings/__init__.py b/libs/community/langchain_community/embeddings/__init__.py
index 5b49744a49475..0b4c9c35639a0 100644
--- a/libs/community/langchain_community/embeddings/__init__.py
+++ b/libs/community/langchain_community/embeddings/__init__.py
@@ -22,6 +22,9 @@
from langchain_community.embeddings.anyscale import (
AnyscaleEmbeddings,
)
+ from langchain_community.embeddings.ascend import (
+ AscendEmbeddings,
+ )
from langchain_community.embeddings.awa import (
AwaEmbeddings,
)
@@ -236,6 +239,7 @@
"AlephAlphaAsymmetricSemanticEmbedding",
"AlephAlphaSymmetricSemanticEmbedding",
"AnyscaleEmbeddings",
+ "AscendEmbeddings",
"AwaEmbeddings",
"AzureOpenAIEmbeddings",
"BaichuanTextEmbeddings",
@@ -391,6 +395,7 @@
"TitanTakeoffEmbed": "langchain_community.embeddings.titan_takeoff",
"PremAIEmbeddings": "langchain_community.embeddings.premai",
"YandexGPTEmbeddings": "langchain_community.embeddings.yandex",
+ "AscendEmbeddings": "langchain_community.embeddings.ascend",
"ZhipuAIEmbeddings": "langchain_community.embeddings.zhipuai",
}
diff --git a/libs/community/langchain_community/embeddings/ascend.py b/libs/community/langchain_community/embeddings/ascend.py
new file mode 100644
index 0000000000000..4e71635663fa5
--- /dev/null
+++ b/libs/community/langchain_community/embeddings/ascend.py
@@ -0,0 +1,120 @@
+import os
+from typing import Any, Dict, List, Optional
+
+from langchain_core.embeddings import Embeddings
+from langchain_core.pydantic_v1 import BaseModel, root_validator
+
+
+class AscendEmbeddings(Embeddings, BaseModel):
+ """
+ Ascend NPU accelerate Embedding model
+
+ Please ensure that you have installed CANN and torch_npu.
+
+ Example:
+
+ from langchain_community.embeddings import AscendEmbeddings
+        model = AscendEmbeddings(model_path="/path/to/model",
+ device_id=0,
+ query_instruction="Represent this sentence for searching relevant passages: "
+ )
+ """
+
+ """model path"""
+ model_path: str
+ """Ascend NPU device id."""
+ device_id: int = 0
+ """Unstruntion to used for embedding query."""
+ query_instruction: str = ""
+ """Unstruntion to used for embedding document."""
+ document_instruction: str = ""
+ use_fp16: bool = True
+ pooling_method: Optional[str] = "cls"
+ model: Any
+ tokenizer: Any
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ try:
+ from transformers import AutoModel, AutoTokenizer
+ except ImportError as e:
+ raise ImportError(
+ "Unable to import transformers, please install with "
+ "`pip install -U transformers`."
+ ) from e
+ try:
+ self.model = AutoModel.from_pretrained(self.model_path).npu().eval()
+ self.tokenizer = AutoTokenizer.from_pretrained(self.model_path)
+ except Exception as e:
+ raise Exception(
+ f"Failed to load model [self.model_path], due to following error:{e}"
+ )
+
+ if self.use_fp16:
+ self.model.half()
+ self.encode([f"warmup {i} times" for i in range(10)])
+
+ @root_validator
+ def validate_environment(cls, values: Dict) -> Dict:
+ if not os.access(values["model_path"], os.F_OK):
+ raise FileNotFoundError(
+ f"Unabled to find valid model path in [{values['model_path']}]"
+ )
+ try:
+ import torch_npu
+ except ImportError:
+ raise ModuleNotFoundError("torch_npu not found, please install torch_npu")
+ except Exception as e:
+ raise e
+ try:
+ torch_npu.npu.set_device(values["device_id"])
+ except Exception as e:
+ raise Exception(f"set device failed due to {e}")
+ return values
+
+ def encode(self, sentences: Any) -> Any:
+ inputs = self.tokenizer(
+ sentences,
+ padding=True,
+ truncation=True,
+ return_tensors="pt",
+ max_length=512,
+ )
+ try:
+ import torch
+ except ImportError as e:
+ raise ImportError(
+ "Unable to import torch, please install with " "`pip install -U torch`."
+ ) from e
+ last_hidden_state = self.model(
+ inputs.input_ids.npu(), inputs.attention_mask.npu(), return_dict=True
+ ).last_hidden_state
+ tmp = self.pooling(last_hidden_state, inputs["attention_mask"].npu())
+ embeddings = torch.nn.functional.normalize(tmp, dim=-1)
+ return embeddings.cpu().detach().numpy()
+
+ def pooling(self, last_hidden_state: Any, attention_mask: Any = None) -> Any:
+ try:
+ import torch
+ except ImportError as e:
+ raise ImportError(
+ "Unable to import torch, please install with " "`pip install -U torch`."
+ ) from e
+ if self.pooling_method == "cls":
+ return last_hidden_state[:, 0]
+ elif self.pooling_method == "mean":
+ s = torch.sum(
+                last_hidden_state * attention_mask.unsqueeze(-1).float(), dim=1
+ )
+ d = attention_mask.sum(dim=1, keepdim=True).float()
+ return s / d
+ else:
+ raise NotImplementedError(
+ f"Pooling method [{self.pooling_method}] not implemented"
+ )
+
+ def embed_documents(self, texts: List[str]) -> List[List[float]]:
+ return self.encode([self.document_instruction + text for text in texts])
+
+ def embed_query(self, text: str) -> List[float]:
+ return self.encode([self.query_instruction + text])[0]
diff --git a/libs/community/langchain_community/graphs/neo4j_graph.py b/libs/community/langchain_community/graphs/neo4j_graph.py
index cd2791d64621a..4897348a7b977 100644
--- a/libs/community/langchain_community/graphs/neo4j_graph.py
+++ b/libs/community/langchain_community/graphs/neo4j_graph.py
@@ -287,6 +287,10 @@ def _format_schema(schema: Dict, is_enhanced: bool) -> str:
)
+def _remove_backticks(text: str) -> str:
+ return text.replace("`", "")
+
+
class Neo4jGraph(GraphStore):
"""Neo4j database wrapper for various graph operations.
@@ -571,6 +575,9 @@ def add_graph_documents(
document.source.page_content.encode("utf-8")
).hexdigest()
+ # Remove backticks from node types
+ for node in document.nodes:
+ node.type = _remove_backticks(node.type)
# Import nodes
self.query(
node_import_query,
@@ -586,10 +593,12 @@ def add_graph_documents(
"data": [
{
"source": el.source.id,
- "source_label": el.source.type,
+ "source_label": _remove_backticks(el.source.type),
"target": el.target.id,
- "target_label": el.target.type,
- "type": el.type.replace(" ", "_").upper(),
+ "target_label": _remove_backticks(el.target.type),
+ "type": _remove_backticks(
+ el.type.replace(" ", "_").upper()
+ ),
"properties": el.properties,
}
for el in document.relationships
diff --git a/libs/community/langchain_community/llms/oci_generative_ai.py b/libs/community/langchain_community/llms/oci_generative_ai.py
index 2c2935cc76df3..178694656e48b 100644
--- a/libs/community/langchain_community/llms/oci_generative_ai.py
+++ b/libs/community/langchain_community/llms/oci_generative_ai.py
@@ -1,17 +1,53 @@
from __future__ import annotations
-from abc import ABC
+import json
+from abc import ABC, abstractmethod
from enum import Enum
-from typing import Any, Dict, List, Mapping, Optional
+from typing import Any, Dict, Iterator, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
+from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
from langchain_community.llms.utils import enforce_stop_tokens
CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"
-VALID_PROVIDERS = ("cohere", "meta")
+
+
class Provider(ABC):
    """Abstract interface for an OCI Generative AI model provider."""

    @property
    @abstractmethod
    def stop_sequence_key(self) -> str:
        """Name of the stop-sequence parameter in this provider's request."""
        ...

    @abstractmethod
    def completion_response_to_text(self, response: Any) -> str:
        """Extract the generated text from a raw completion response."""
        ...
+
+
class CohereProvider(Provider):
    """Provider adapter for Cohere models on OCI Generative AI."""

    # Cohere's inference API names its stop parameter "stop_sequences".
    stop_sequence_key = "stop_sequences"

    def __init__(self) -> None:
        # Imported lazily so the oci SDK is only required when this
        # provider is actually instantiated.
        from oci.generative_ai_inference import models

        self.llm_inference_request = models.CohereLlmInferenceRequest

    def completion_response_to_text(self, response: Any) -> str:
        """Return the first generated text from a Cohere completion response."""
        return response.data.inference_response.generated_texts[0].text
+
+
class MetaProvider(Provider):
    """Provider adapter for Meta (Llama) models on OCI Generative AI."""

    # Llama's inference API names its stop parameter "stop".
    stop_sequence_key = "stop"

    def __init__(self) -> None:
        # Imported lazily so the oci SDK is only required when this
        # provider is actually instantiated.
        from oci.generative_ai_inference import models

        self.llm_inference_request = models.LlamaLlmInferenceRequest

    def completion_response_to_text(self, response: Any) -> str:
        """Return the first choice's text from a Llama completion response."""
        return response.data.inference_response.choices[0].text
class OCIAuthType(Enum):
@@ -33,8 +69,8 @@ class OCIGenAIBase(BaseModel, ABC):
API_KEY,
SECURITY_TOKEN,
- INSTANCE_PRINCIPLE,
- RESOURCE_PRINCIPLE
+ INSTANCE_PRINCIPAL,
+ RESOURCE_PRINCIPAL
If not specified, API_KEY will be used
"""
@@ -65,11 +101,6 @@ class OCIGenAIBase(BaseModel, ABC):
is_stream: bool = False
"""Whether to stream back partial progress"""
- llm_stop_sequence_mapping: Mapping[str, str] = {
- "cohere": "stop_sequences",
- "meta": "stop",
- }
-
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that OCI config and python package exists in environment."""
@@ -121,24 +152,28 @@ def make_security_token_signer(oci_config): # type: ignore[no-untyped-def]
"signer"
] = oci.auth.signers.get_resource_principals_signer()
else:
- raise ValueError("Please provide valid value to auth_type")
+ raise ValueError(
+ "Please provide valid value to auth_type, "
+ f"{values['auth_type']} is not valid."
+ )
values["client"] = oci.generative_ai_inference.GenerativeAiInferenceClient(
**client_kwargs
)
except ImportError as ex:
- raise ImportError(
+ raise ModuleNotFoundError(
"Could not import oci python package. "
"Please make sure you have the oci package installed."
) from ex
except Exception as e:
raise ValueError(
- "Could not authenticate with OCI client. "
- "Please check if ~/.oci/config exists. "
- "If INSTANCE_PRINCIPLE or RESOURCE_PRINCIPLE is used, "
- "Please check the specified "
- "auth_profile and auth_type are valid."
+ """Could not authenticate with OCI client.
+ Please check if ~/.oci/config exists.
+ If INSTANCE_PRINCIPAL or RESOURCE_PRINCIPAL is used,
+ please check the specified
+ auth_profile and auth_type are valid.""",
+ e,
) from e
return values
@@ -151,19 +186,19 @@ def _identifying_params(self) -> Mapping[str, Any]:
**{"model_kwargs": _model_kwargs},
}
- def _get_provider(self) -> str:
+ def _get_provider(self, provider_map: Mapping[str, Any]) -> Any:
if self.provider is not None:
provider = self.provider
else:
provider = self.model_id.split(".")[0].lower()
- if provider not in VALID_PROVIDERS:
+ if provider not in provider_map:
raise ValueError(
f"Invalid provider derived from model_id: {self.model_id} "
"Please explicitly pass in the supported provider "
"when using custom endpoint"
)
- return provider
+ return provider_map[provider]
class OCIGenAI(LLM, OCIGenAIBase):
@@ -173,7 +208,7 @@ class OCIGenAI(LLM, OCIGenAIBase):
https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm
The authentifcation method is passed through auth_type and should be one of:
- API_KEY (default), SECURITY_TOKEN, INSTANCE_PRINCIPLE, RESOURCE_PRINCIPLE
+ API_KEY (default), SECURITY_TOKEN, INSTANCE_PRINCIPAL, RESOURCE_PRINCIPAL
Make sure you have the required policies (profile/roles) to
access the OCI Generative AI service.
@@ -204,21 +239,29 @@ class Config:
@property
def _llm_type(self) -> str:
"""Return type of llm."""
- return "oci"
+ return "oci_generative_ai_completion"
+
+ @property
+ def _provider_map(self) -> Mapping[str, Any]:
+ """Get the provider map"""
+ return {
+ "cohere": CohereProvider(),
+ "meta": MetaProvider(),
+ }
+
+ @property
+ def _provider(self) -> Any:
+ """Get the internal provider object"""
+ return self._get_provider(provider_map=self._provider_map)
def _prepare_invocation_object(
self, prompt: str, stop: Optional[List[str]], kwargs: Dict[str, Any]
) -> Dict[str, Any]:
from oci.generative_ai_inference import models
- oci_llm_request_mapping = {
- "cohere": models.CohereLlmInferenceRequest,
- "meta": models.LlamaLlmInferenceRequest,
- }
- provider = self._get_provider()
_model_kwargs = self.model_kwargs or {}
if stop is not None:
- _model_kwargs[self.llm_stop_sequence_mapping[provider]] = stop
+ _model_kwargs[self._provider.stop_sequence_key] = stop
if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
@@ -232,19 +275,13 @@ def _prepare_invocation_object(
invocation_obj = models.GenerateTextDetails(
compartment_id=self.compartment_id,
serving_mode=serving_mode,
- inference_request=oci_llm_request_mapping[provider](**inference_params),
+ inference_request=self._provider.llm_inference_request(**inference_params),
)
return invocation_obj
def _process_response(self, response: Any, stop: Optional[List[str]]) -> str:
- provider = self._get_provider()
- if provider == "cohere":
- text = response.data.inference_response.generated_texts[0].text
- elif provider == "meta":
- text = response.data.inference_response.choices[0].text
- else:
- raise ValueError(f"Invalid provider: {provider}")
+ text = self._provider.completion_response_to_text(response)
if stop is not None:
text = enforce_stop_tokens(text, stop)
@@ -272,7 +309,51 @@ def _call(
response = llm.invoke("Tell me a joke.")
"""
+ if self.is_stream:
+ text = ""
+ for chunk in self._stream(prompt, stop, run_manager, **kwargs):
+ text += chunk.text
+ if stop is not None:
+ text = enforce_stop_tokens(text, stop)
+ return text
invocation_obj = self._prepare_invocation_object(prompt, stop, kwargs)
response = self.client.generate_text(invocation_obj)
return self._process_response(response, stop)
+
def _stream(
    self,
    prompt: str,
    stop: Optional[List[str]] = None,
    run_manager: Optional[CallbackManagerForLLMRun] = None,
    **kwargs: Any,
) -> Iterator[GenerationChunk]:
    """Stream OCIGenAI LLM on given prompt.

    Args:
        prompt: The prompt to pass into the model.
        stop: Optional list of stop words to use when generating.
        run_manager: Optional callback manager notified for each new token.

    Returns:
        An iterator of GenerationChunks.

    Example:
        .. code-block:: python

            response = llm.stream("Tell me a joke.")
    """

    # NOTE(review): mutating instance state here makes streaming sticky for
    # later invoke() calls on this instance — confirm this is intended.
    self.is_stream = True
    invocation_obj = self._prepare_invocation_object(prompt, stop, kwargs)
    response = self.client.generate_text(invocation_obj)

    # The OCI SDK yields server-sent events; each event's data is a JSON
    # payload that carries a "text" field for token chunks (absent for
    # non-token events, which are emitted as empty chunks).
    for event in response.data.events():
        json_load = json.loads(event.data)
        if "text" in json_load:
            event_data_text = json_load["text"]
        else:
            event_data_text = ""
        chunk = GenerationChunk(text=event_data_text)
        if run_manager:
            run_manager.on_llm_new_token(chunk.text, chunk=chunk)
        yield chunk
diff --git a/libs/community/langchain_community/utilities/bing_search.py b/libs/community/langchain_community/utilities/bing_search.py
index 52c993526189a..807f4858a3fe4 100644
--- a/libs/community/langchain_community/utilities/bing_search.py
+++ b/libs/community/langchain_community/utilities/bing_search.py
@@ -5,6 +5,10 @@
from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
from langchain_core.utils import get_from_dict_or_env
+# BING_SEARCH_ENDPOINT is the default endpoint for the Bing Search API;
+# users rarely need to override it.
+BING_SEARCH_ENDPOINT = "https://api.bing.microsoft.com/v7.0/search"
+
class BingSearchAPIWrapper(BaseModel):
"""Wrapper for Bing Search API."""
@@ -52,7 +56,7 @@ def validate_environment(cls, values: Dict) -> Dict:
values,
"bing_search_url",
"BING_SEARCH_URL",
- # default="https://api.bing.microsoft.com/v7.0/search",
+ default=BING_SEARCH_ENDPOINT,
)
values["bing_search_url"] = bing_search_url
diff --git a/libs/community/langchain_community/utilities/pebblo.py b/libs/community/langchain_community/utilities/pebblo.py
index 9ed43630c723b..377155c71f4a3 100644
--- a/libs/community/langchain_community/utilities/pebblo.py
+++ b/libs/community/langchain_community/utilities/pebblo.py
@@ -63,93 +63,86 @@
class IndexedDocument(Document):
+ """Pebblo Indexed Document."""
+
id: str
+ """Unique ID of the document."""
class Runtime(BaseModel):
- """Pebblo Runtime.
-
- Args:
- type (Optional[str]): Runtime type. Defaults to ""
- host (str): Hostname of runtime.
- path (str): Current working directory path.
- ip (Optional[str]): Ip of current runtime. Defaults to ""
- platform (str): Platform details of current runtime.
- os (str): OS name.
- os_version (str): OS version.
- language (str): Runtime kernel.
- language_version (str): version of current runtime kernel.
- runtime (Optional[str]) More runtime details. Defaults to ""
- """
+ """Pebblo Runtime."""
type: str = "local"
+ """Runtime type. Defaults to 'local'."""
host: str
+ """Host name of the runtime."""
path: str
+ """Current working directory path."""
ip: Optional[str] = ""
+ """IP address of the runtime. Defaults to ''."""
platform: str
+ """Platform details of the runtime."""
os: str
+ """OS name."""
os_version: str
+ """OS version."""
language: str
+ """Runtime kernel."""
language_version: str
+ """Version of the runtime kernel."""
runtime: str = "local"
+ """More runtime details. Defaults to 'local'."""
class Framework(BaseModel):
- """Pebblo Framework instance.
-
- Args:
- name (str): Name of the Framework.
- version (str): Version of the Framework.
- """
+ """Pebblo Framework instance."""
name: str
+ """Name of the Framework."""
version: str
+ """Version of the Framework."""
class App(BaseModel):
- """Pebblo AI application.
-
- Args:
- name (str): Name of the app.
- owner (str): Owner of the app.
- description (Optional[str]): Description of the app.
- load_id (str): Unique load_id of the app instance.
- runtime (Runtime): Runtime details of app.
- framework (Framework): Framework details of the app
- plugin_version (str): Plugin version used for the app.
- """
+ """Pebblo AI application."""
name: str
+ """Name of the app."""
owner: str
+ """Owner of the app."""
description: Optional[str]
+ """Description of the app."""
load_id: str
+ """Unique load_id of the app instance."""
runtime: Runtime
+ """Runtime details of the app."""
framework: Framework
+ """Framework details of the app."""
plugin_version: str
+ """Plugin version used for the app."""
class Doc(BaseModel):
- """Pebblo document.
-
- Args:
- name (str): Name of app originating this document.
- owner (str): Owner of app.
- docs (list): List of documents with its metadata.
- plugin_version (str): Pebblo plugin Version
- load_id (str): Unique load_id of the app instance.
- loader_details (dict): Loader details with its metadata.
- loading_end (bool): Boolean, specifying end of loading of source.
- source_owner (str): Owner of the source of the loader.
- """
+ """Pebblo document."""
name: str
+ """Name of app originating this document."""
owner: str
+ """Owner of app."""
docs: list
+ """List of documents with its metadata."""
plugin_version: str
+ """Pebblo plugin Version"""
load_id: str
+ """Unique load_id of the app instance."""
loader_details: dict
+ """Loader details with its metadata."""
loading_end: bool
+ """Boolean, specifying end of loading of source."""
source_owner: str
+ """Owner of the source of the loader."""
+ classifier_location: str
+ """Location of the classifier."""
def get_full_path(path: str) -> str:
diff --git a/libs/community/pyproject.toml b/libs/community/pyproject.toml
index 53c279ce51b14..e160a3339c9ef 100644
--- a/libs/community/pyproject.toml
+++ b/libs/community/pyproject.toml
@@ -156,4 +156,4 @@ ignore-regex = '.*(Stati Uniti|Tense=Pres).*'
# whats is a typo but used frequently in queries so kept as is
# aapply - async apply
# unsecure - typo but part of API, decided to not bother for now
-ignore-words-list = 'momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin'
+ignore-words-list = 'momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin,cann'
diff --git a/libs/community/tests/integration_tests/graphs/test_neo4j.py b/libs/community/tests/integration_tests/graphs/test_neo4j.py
index a519b43070da7..761cbc95e4b8e 100644
--- a/libs/community/tests/integration_tests/graphs/test_neo4j.py
+++ b/libs/community/tests/integration_tests/graphs/test_neo4j.py
@@ -25,6 +25,20 @@
)
]
+test_data_backticks = [
+ GraphDocument(
+ nodes=[Node(id="foo", type="foo`"), Node(id="bar", type="`bar")],
+ relationships=[
+ Relationship(
+ source=Node(id="foo", type="f`oo"),
+ target=Node(id="bar", type="ba`r"),
+ type="`REL`",
+ )
+ ],
+ source=Document(page_content="source document"),
+ )
+]
+
def test_cypher_return_correct_schema() -> None:
"""Test that chain returns direct results."""
@@ -363,3 +377,24 @@ def test_enhanced_schema_exception() -> None:
# remove metadata portion of schema
del graph.structured_schema["metadata"]
assert graph.structured_schema == expected_output
+
+
def test_backticks() -> None:
    """Test that backticks are correctly removed."""
    url = os.environ.get("NEO4J_URI")
    username = os.environ.get("NEO4J_USERNAME")
    password = os.environ.get("NEO4J_PASSWORD")
    assert url is not None
    assert username is not None
    assert password is not None

    graph = Neo4jGraph(url=url, username=username, password=password)
    # Start from an empty database so the assertions below only see the
    # documents inserted by this test.
    graph.query("MATCH (n) DETACH DELETE n")
    graph.add_graph_documents(test_data_backticks)
    nodes = graph.query("MATCH (n) RETURN labels(n) AS labels ORDER BY n.id")
    rels = graph.query("MATCH ()-[r]->() RETURN type(r) AS type")
    # All backticks must have been stripped from node labels and
    # relationship types on import.
    expected_nodes = [{"labels": ["bar"]}, {"labels": ["foo"]}]
    expected_rels = [{"type": "REL"}]

    assert nodes == expected_nodes
    assert rels == expected_rels
diff --git a/libs/community/tests/unit_tests/chat_models/test_imports.py b/libs/community/tests/unit_tests/chat_models/test_imports.py
index a0e573068c0f6..3c9b5e22547f0 100644
--- a/libs/community/tests/unit_tests/chat_models/test_imports.py
+++ b/libs/community/tests/unit_tests/chat_models/test_imports.py
@@ -27,6 +27,7 @@
"ChatMlflow",
"ChatMLflowAIGateway",
"ChatMLX",
+ "ChatOCIGenAI",
"ChatOllama",
"ChatOpenAI",
"ChatPerplexity",
diff --git a/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py b/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py
new file mode 100644
index 0000000000000..b7d80d19c4e76
--- /dev/null
+++ b/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py
@@ -0,0 +1,105 @@
+"""Test OCI Generative AI LLM service"""
+from unittest.mock import MagicMock
+
+import pytest
+from langchain_core.messages import HumanMessage
+from pytest import MonkeyPatch
+
+from langchain_community.chat_models.oci_generative_ai import ChatOCIGenAI
+
+
class MockResponseDict(dict):
    """Dict that exposes its keys as attributes, mimicking SDK objects."""

    def __getattr__(self, val):  # type: ignore[no-untyped-def]
        # Delegate attribute access to item lookup (KeyError if absent).
        return self.__getitem__(val)
+
+
+@pytest.mark.requires("oci")
+@pytest.mark.parametrize(
+ "test_model_id", ["cohere.command-r-16k", "meta.llama-3-70b-instruct"]
+)
+def test_llm_chat(monkeypatch: MonkeyPatch, test_model_id: str) -> None:
+ """Test valid chat call to OCI Generative AI LLM service."""
+ oci_gen_ai_client = MagicMock()
+ llm = ChatOCIGenAI(model_id=test_model_id, client=oci_gen_ai_client)
+
+ provider = llm.model_id.split(".")[0].lower()
+
+ def mocked_response(*args): # type: ignore[no-untyped-def]
+ response_text = "Assistant chat reply."
+ response = None
+ if provider == "cohere":
+ response = MockResponseDict(
+ {
+ "status": 200,
+ "data": MockResponseDict(
+ {
+ "chat_response": MockResponseDict(
+ {
+ "text": response_text,
+ "finish_reason": "completed",
+ }
+ ),
+ "model_id": "cohere.command-r-16k",
+ "model_version": "1.0.0",
+ }
+ ),
+ "request_id": "1234567890",
+ "headers": MockResponseDict(
+ {
+ "content-length": "123",
+ }
+ ),
+ }
+ )
+ elif provider == "meta":
+ response = MockResponseDict(
+ {
+ "status": 200,
+ "data": MockResponseDict(
+ {
+ "chat_response": MockResponseDict(
+ {
+ "choices": [
+ MockResponseDict(
+ {
+ "message": MockResponseDict(
+ {
+ "content": [
+ MockResponseDict(
+ {
+ "text": response_text, # noqa: E501
+ }
+ )
+ ]
+ }
+ ),
+ "finish_reason": "completed",
+ }
+ )
+ ],
+ "time_created": "2024-09-01T00:00:00Z",
+ }
+ ),
+ "model_id": "cohere.command-r-16k",
+ "model_version": "1.0.0",
+ }
+ ),
+ "request_id": "1234567890",
+ "headers": MockResponseDict(
+ {
+ "content-length": "123",
+ }
+ ),
+ }
+ )
+ return response
+
+ monkeypatch.setattr(llm.client, "chat", mocked_response)
+
+ messages = [
+ HumanMessage(content="User message"),
+ ]
+
+ expected = "Assistant chat reply."
+ actual = llm.invoke(messages, temperature=0.2)
+ assert actual.content == expected
diff --git a/libs/community/tests/unit_tests/embeddings/test_imports.py b/libs/community/tests/unit_tests/embeddings/test_imports.py
index 7f991488f3b6f..7cbc1f6a331f5 100644
--- a/libs/community/tests/unit_tests/embeddings/test_imports.py
+++ b/libs/community/tests/unit_tests/embeddings/test_imports.py
@@ -78,6 +78,7 @@
"OpenVINOEmbeddings",
"OpenVINOBgeEmbeddings",
"SolarEmbeddings",
+ "AscendEmbeddings",
"ZhipuAIEmbeddings",
]
diff --git a/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py b/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py
index b1c36ec7a5fed..cc3599abe6c54 100644
--- a/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py
+++ b/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py
@@ -4,7 +4,7 @@
import pytest
from pytest import MonkeyPatch
-from langchain_community.llms import OCIGenAI
+from langchain_community.llms.oci_generative_ai import OCIGenAI
class MockResponseDict(dict):
@@ -16,12 +16,12 @@ def __getattr__(self, val): # type: ignore[no-untyped-def]
@pytest.mark.parametrize(
"test_model_id", ["cohere.command", "cohere.command-light", "meta.llama-2-70b-chat"]
)
-def test_llm_call(monkeypatch: MonkeyPatch, test_model_id: str) -> None:
- """Test valid call to OCI Generative AI LLM service."""
+def test_llm_complete(monkeypatch: MonkeyPatch, test_model_id: str) -> None:
+ """Test valid completion call to OCI Generative AI LLM service."""
oci_gen_ai_client = MagicMock()
llm = OCIGenAI(model_id=test_model_id, client=oci_gen_ai_client)
- provider = llm._get_provider()
+ provider = llm.model_id.split(".")[0].lower()
def mocked_response(*args): # type: ignore[no-untyped-def]
response_text = "This is the completion."
@@ -71,6 +71,5 @@ def mocked_response(*args): # type: ignore[no-untyped-def]
)
monkeypatch.setattr(llm.client, "generate_text", mocked_response)
-
output = llm.invoke("This is a prompt.", temperature=0.2)
assert output == "This is the completion."
diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py
index 6ea52daf4c768..4950302777fcb 100644
--- a/libs/core/langchain_core/runnables/base.py
+++ b/libs/core/langchain_core/runnables/base.py
@@ -1892,6 +1892,8 @@ def _transform_stream_with_config(
final_input_supported = False
else:
final_input = ichunk
+ except GeneratorExit:
+ run_manager.on_chain_end(final_output, inputs=final_input)
except BaseException as e:
run_manager.on_chain_error(e, inputs=final_input)
raise
diff --git a/libs/langchain/langchain/chains/llm.py b/libs/langchain/langchain/chains/llm.py
index 874cfa96faddf..e90578e4c8aab 100644
--- a/libs/langchain/langchain/chains/llm.py
+++ b/libs/langchain/langchain/chains/llm.py
@@ -38,7 +38,7 @@
@deprecated(
since="0.1.17",
alternative="RunnableSequence, e.g., `prompt | llm`",
- removal="0.3.0",
+ removal="1.0",
)
class LLMChain(Chain):
"""Chain to run queries against LLMs.
@@ -48,6 +48,7 @@ class LLMChain(Chain):
.. code-block:: python
+ from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_openai import OpenAI
@@ -56,7 +57,7 @@ class LLMChain(Chain):
input_variables=["adjective"], template=prompt_template
)
llm = OpenAI()
- chain = prompt | llm
+ chain = prompt | llm | StrOutputParser()
chain.invoke("your adjective here")
diff --git a/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/__init__.py b/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/__init__.py
index f57b270c93746..7fcbdf207f36a 100644
--- a/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/__init__.py
+++ b/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/__init__.py
@@ -1,3 +1,5 @@
+"""This package provides tools for managing dynamic sessions in Azure."""
+
from langchain_azure_dynamic_sessions.tools.sessions import SessionsPythonREPLTool
__all__ = [
diff --git a/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/__init__.py b/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/__init__.py
index f57b270c93746..7fcbdf207f36a 100644
--- a/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/__init__.py
+++ b/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/__init__.py
@@ -1,3 +1,5 @@
+"""This package provides tools for managing dynamic sessions in Azure."""
+
from langchain_azure_dynamic_sessions.tools.sessions import SessionsPythonREPLTool
__all__ = [
diff --git a/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/sessions.py b/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/sessions.py
index 2315b281b32d5..b447ddb33d60c 100644
--- a/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/sessions.py
+++ b/libs/partners/azure-dynamic-sessions/langchain_azure_dynamic_sessions/tools/sessions.py
@@ -1,3 +1,9 @@
+"""This is the Azure Dynamic Sessions module.
+
+This module provides the SessionsPythonREPLTool class for
+managing dynamic sessions in Azure.
+"""
+
import importlib.metadata
import json
import os
@@ -27,7 +33,6 @@ def _access_token_provider_factory() -> Callable[[], Optional[str]]:
Returns:
Callable[[], Optional[str]]: The access token provider function
"""
-
access_token: Optional[AccessToken] = None
def access_token_provider() -> Optional[str]:
@@ -44,6 +49,7 @@ def access_token_provider() -> Optional[str]:
def _sanitize_input(query: str) -> str:
"""Sanitize input to the python REPL.
+
Remove whitespace, backtick & python (if llm mistakes python console as terminal)
Args:
@@ -52,7 +58,6 @@ def _sanitize_input(query: str) -> str:
Returns:
str: The sanitized query
"""
-
# Removes `, whitespace & python from start
query = re.sub(r"^(\s|`)*(?i:python)?\s*", "", query)
# Removes whitespace & ` from end
@@ -86,11 +91,11 @@ def from_dict(data: dict) -> "RemoteFileMetadata":
class SessionsPythonREPLTool(BaseTool):
- """A tool for running Python code in an Azure Container Apps dynamic sessions
- code interpreter.
+ """A tool for running Python code.
- Example:
+ Run python code in an Azure Container Apps dynamic sessions code interpreter.
+ Example:
.. code-block:: python
from langchain_azure_dynamic_sessions import SessionsPythonREPLTool
@@ -135,7 +140,6 @@ def _build_url(self, path: str) -> str:
def execute(self, python_code: str) -> Any:
"""Execute Python code in the session."""
-
if self.sanitize_input:
python_code = _sanitize_input(python_code)
@@ -197,7 +201,6 @@ def upload_file(
Returns:
RemoteFileMetadata: The metadata for the uploaded file
"""
-
if data and local_file_path:
raise ValueError("data and local_file_path cannot be provided together")
diff --git a/libs/partners/azure-dynamic-sessions/poetry.lock b/libs/partners/azure-dynamic-sessions/poetry.lock
index c5397390cd0b8..6ca09f0481e5a 100644
--- a/libs/partners/azure-dynamic-sessions/poetry.lock
+++ b/libs/partners/azure-dynamic-sessions/poetry.lock
@@ -1,14 +1,14 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "annotated-types"
-version = "0.6.0"
+version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
files = [
- {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
- {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
+ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
[package.dependencies]
@@ -16,13 +16,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
[[package]]
name = "anyio"
-version = "4.3.0"
+version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
- {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
- {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
+ {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+ {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
]
[package.dependencies]
@@ -67,13 +67,13 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
[[package]]
name = "azure-core"
-version = "1.30.1"
+version = "1.30.2"
description = "Microsoft Azure Core Library for Python"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"},
- {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"},
+ {file = "azure-core-1.30.2.tar.gz", hash = "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472"},
+ {file = "azure_core-1.30.2-py3-none-any.whl", hash = "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a"},
]
[package.dependencies]
@@ -86,13 +86,13 @@ aio = ["aiohttp (>=3.0)"]
[[package]]
name = "azure-identity"
-version = "1.16.0"
+version = "1.17.1"
description = "Microsoft Azure Identity Library for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "azure-identity-1.16.0.tar.gz", hash = "sha256:6ff1d667cdcd81da1ceab42f80a0be63ca846629f518a922f7317a7e3c844e1b"},
- {file = "azure_identity-1.16.0-py3-none-any.whl", hash = "sha256:722fdb60b8fdd55fa44dc378b8072f4b419b56a5e54c0de391f644949f3a826f"},
+ {file = "azure-identity-1.17.1.tar.gz", hash = "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea"},
+ {file = "azure_identity-1.17.1-py3-none-any.whl", hash = "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382"},
]
[package.dependencies]
@@ -100,6 +100,7 @@ azure-core = ">=1.23.0"
cryptography = ">=2.5"
msal = ">=1.24.0"
msal-extensions = ">=0.3.0"
+typing-extensions = ">=4.0.0"
[[package]]
name = "backcall"
@@ -114,13 +115,13 @@ files = [
[[package]]
name = "certifi"
-version = "2024.2.2"
+version = "2024.6.2"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
- {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+ {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
+ {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
]
[[package]]
@@ -288,13 +289,13 @@ files = [
[[package]]
name = "codespell"
-version = "2.2.6"
+version = "2.3.0"
description = "Codespell"
optional = false
python-versions = ">=3.8"
files = [
- {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"},
- {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"},
+ {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"},
+ {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"},
]
[package.extras]
@@ -333,43 +334,43 @@ test = ["pytest"]
[[package]]
name = "cryptography"
-version = "42.0.7"
+version = "42.0.8"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
- {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"},
- {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"},
- {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"},
- {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"},
- {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"},
- {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"},
- {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"},
- {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"},
- {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"},
- {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"},
- {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"},
- {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"},
- {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"},
- {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"},
- {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"},
- {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"},
- {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"},
- {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"},
- {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"},
- {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"},
- {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"},
- {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"},
- {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"},
- {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"},
- {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"},
- {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"},
- {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"},
- {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"},
- {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"},
- {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"},
- {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"},
- {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"},
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
+ {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
+ {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
+ {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
+ {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
+ {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
+ {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
]
[package.dependencies]
@@ -549,22 +550,22 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "7.1.0"
+version = "7.2.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"},
- {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"},
+ {file = "importlib_metadata-7.2.0-py3-none-any.whl", hash = "sha256:04e4aad329b8b948a5711d394fa8759cb80f009225441b4f2a02bd4d8e5f426c"},
+ {file = "importlib_metadata-7.2.0.tar.gz", hash = "sha256:3ff4519071ed42740522d494d04819b666541b9752c43012f85afb2cc220fcc6"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
-testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
[[package]]
name = "iniconfig"
@@ -684,24 +685,24 @@ jsonpointer = ">=1.9"
[[package]]
name = "jsonpointer"
-version = "2.4"
+version = "3.0.0"
description = "Identify specific nodes in a JSON document (RFC 6901)"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
+python-versions = ">=3.7"
files = [
- {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"},
- {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"},
+ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
+ {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
]
[[package]]
name = "jupyter-client"
-version = "8.6.1"
+version = "8.6.2"
description = "Jupyter protocol implementation and client libraries"
optional = false
python-versions = ">=3.8"
files = [
- {file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"},
- {file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"},
+ {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"},
+ {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"},
]
[package.dependencies]
@@ -714,7 +715,7 @@ traitlets = ">=5.3"
[package.extras]
docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
-test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
+test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
[[package]]
name = "jupyter-core"
@@ -738,7 +739,7 @@ test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"
[[package]]
name = "langchain-core"
-version = "0.2.0rc1"
+version = "0.2.9"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -747,14 +748,14 @@ develop = true
[package.dependencies]
jsonpatch = "^1.33"
-langsmith = "^0.1.0"
-packaging = "^23.2"
-pydantic = ">=1,<3"
+langsmith = "^0.1.75"
+packaging = ">=23.2,<25"
+pydantic = [
+ {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
+ {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
+]
PyYAML = ">=5.3"
-tenacity = "^8.1.0"
-
-[package.extras]
-extended-testing = ["jinja2 (>=3,<4)"]
+tenacity = "^8.1.0,!=8.4.0"
[package.source]
type = "directory"
@@ -762,7 +763,7 @@ url = "../../core"
[[package]]
name = "langchain-openai"
-version = "0.1.7"
+version = "0.1.9"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -770,9 +771,9 @@ files = []
develop = true
[package.dependencies]
-langchain-core = ">=0.1.46,<0.3"
-openai = "^1.24.0"
-tiktoken = ">=0.5.2,<1"
+langchain-core = ">=0.2.2,<0.3"
+openai = "^1.26.0"
+tiktoken = ">=0.7,<1"
[package.source]
type = "directory"
@@ -780,28 +781,29 @@ url = "../openai"
[[package]]
name = "langchainhub"
-version = "0.1.15"
+version = "0.1.20"
description = "The LangChain Hub API client"
optional = false
-python-versions = ">=3.8.1,<4.0"
+python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchainhub-0.1.15-py3-none-any.whl", hash = "sha256:89a0951abd1db255e91c6d545d092a598fc255aa865d1ffc3ce8f93bbeae60e7"},
- {file = "langchainhub-0.1.15.tar.gz", hash = "sha256:fa3ff81a31946860f84c119f1e2f6b7c7707e2bd7ed2394a7313b286d59f3bda"},
+ {file = "langchainhub-0.1.20-py3-none-any.whl", hash = "sha256:b3cbb5b2d7d6f9c3f89748bcc74424d8030ed4ebca58b5f44e0b6d9f111e33eb"},
+ {file = "langchainhub-0.1.20.tar.gz", hash = "sha256:499fa8958233071f35750987f325005d16241bebd455163955b607c195c37f25"},
]
[package.dependencies]
+packaging = ">=23.2,<25"
requests = ">=2,<3"
types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langsmith"
-version = "0.1.58"
+version = "0.1.81"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langsmith-0.1.58-py3-none-any.whl", hash = "sha256:1148cc836ec99d1b2f37cd2fa3014fcac213bb6bad798a2b21bb9111c18c9768"},
- {file = "langsmith-0.1.58.tar.gz", hash = "sha256:a5060933c1fb3006b498ec849677993329d7e6138bdc2ec044068ab806e09c39"},
+ {file = "langsmith-0.1.81-py3-none-any.whl", hash = "sha256:3251d823225eef23ee541980b9d9e506367eabbb7f985a086b5d09e8f78ba7e9"},
+ {file = "langsmith-0.1.81.tar.gz", hash = "sha256:585ef3a2251380bd2843a664c9a28da4a7d28432e3ee8bcebf291ffb8e1f0af0"},
]
[package.dependencies]
@@ -825,22 +827,22 @@ traitlets = "*"
[[package]]
name = "msal"
-version = "1.28.0"
+version = "1.29.0"
description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect."
optional = false
python-versions = ">=3.7"
files = [
- {file = "msal-1.28.0-py3-none-any.whl", hash = "sha256:3064f80221a21cd535ad8c3fafbb3a3582cd9c7e9af0bb789ae14f726a0ca99b"},
- {file = "msal-1.28.0.tar.gz", hash = "sha256:80bbabe34567cb734efd2ec1869b2d98195c927455369d8077b3c542088c5c9d"},
+ {file = "msal-1.29.0-py3-none-any.whl", hash = "sha256:6b301e63f967481f0cc1a3a3bac0cf322b276855bc1b0955468d9deb3f33d511"},
+ {file = "msal-1.29.0.tar.gz", hash = "sha256:8f6725f099752553f9b2fe84125e2a5ebe47b49f92eacca33ebedd3a9ebaae25"},
]
[package.dependencies]
-cryptography = ">=0.6,<45"
+cryptography = ">=2.5,<45"
PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]}
requests = ">=2.0.0,<3"
[package.extras]
-broker = ["pymsalruntime (>=0.13.2,<0.15)"]
+broker = ["pymsalruntime (>=0.13.2,<0.17)"]
[[package]]
name = "msal-extensions"
@@ -935,13 +937,13 @@ files = [
[[package]]
name = "openai"
-version = "1.30.1"
+version = "1.35.3"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-1.30.1-py3-none-any.whl", hash = "sha256:c9fb3c3545c118bbce8deb824397b9433a66d0d0ede6a96f7009c95b76de4a46"},
- {file = "openai-1.30.1.tar.gz", hash = "sha256:4f85190e577cba0b066e1950b8eb9b11d25bc7ebcc43a86b326ce1bfa564ec74"},
+ {file = "openai-1.35.3-py3-none-any.whl", hash = "sha256:7b26544cef80f125431c073ffab3811d2421fbb9e30d3bd5c2436aba00b042d5"},
+ {file = "openai-1.35.3.tar.gz", hash = "sha256:d6177087f150b381d49499be782d764213fdf638d391b29ca692b84dd675a389"},
]
[package.dependencies]
@@ -958,68 +960,68 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
[[package]]
name = "orjson"
-version = "3.10.3"
+version = "3.10.5"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
- {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"},
- {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"},
- {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"},
- {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"},
- {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"},
- {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"},
- {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"},
- {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"},
- {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"},
- {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"},
- {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"},
- {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"},
- {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"},
- {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"},
- {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"},
- {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"},
- {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"},
- {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"},
- {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"},
- {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"},
- {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"},
- {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"},
- {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"},
- {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"},
- {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"},
- {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"},
+ {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"},
+ {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"},
+ {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"},
+ {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"},
+ {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"},
+ {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"},
+ {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"},
+ {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"},
+ {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"},
+ {file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"},
+ {file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"},
+ {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"},
+ {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"},
+ {file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"},
+ {file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"},
+ {file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"},
+ {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"},
+ {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"},
+ {file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"},
+ {file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"},
+ {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"},
+ {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"},
+ {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"},
+ {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"},
+ {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"},
+ {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"},
]
[[package]]
name = "packaging"
-version = "23.2"
+version = "24.1"
description = "Core utilities for Python packages"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
- {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
@@ -1114,13 +1116,13 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p
[[package]]
name = "prompt-toolkit"
-version = "3.0.43"
+version = "3.0.47"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"},
- {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"},
+ {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"},
+ {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"},
]
[package.dependencies]
@@ -1128,27 +1130,28 @@ wcwidth = "*"
[[package]]
name = "psutil"
-version = "5.9.8"
+version = "6.0.0"
description = "Cross-platform lib for process and system monitoring in Python."
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
-files = [
- {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"},
- {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"},
- {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"},
- {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"},
- {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"},
- {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"},
- {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"},
- {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"},
- {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"},
- {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"},
- {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"},
- {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"},
- {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"},
- {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"},
- {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"},
- {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"},
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"},
+ {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"},
+ {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"},
+ {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"},
+ {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"},
+ {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"},
+ {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"},
+ {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"},
+ {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"},
+ {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"},
+ {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"},
+ {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"},
+ {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"},
+ {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"},
+ {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"},
+ {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"},
+ {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"},
]
[package.extras]
@@ -1192,18 +1195,18 @@ files = [
[[package]]
name = "pydantic"
-version = "2.7.1"
+version = "2.7.4"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"},
- {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"},
+ {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"},
+ {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"},
]
[package.dependencies]
annotated-types = ">=0.4.0"
-pydantic-core = "2.18.2"
+pydantic-core = "2.18.4"
typing-extensions = ">=4.6.1"
[package.extras]
@@ -1211,90 +1214,90 @@ email = ["email-validator (>=2.0.0)"]
[[package]]
name = "pydantic-core"
-version = "2.18.2"
+version = "2.18.4"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"},
- {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"},
- {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"},
- {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"},
- {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"},
- {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"},
- {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"},
- {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"},
- {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"},
- {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"},
- {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"},
- {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"},
- {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"},
- {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"},
- {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"},
- {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"},
- {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"},
- {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"},
- {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"},
- {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"},
- {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"},
- {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"},
- {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"},
- {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"},
- {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"},
- {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"},
- {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"},
- {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"},
- {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"},
- {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"},
- {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"},
- {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"},
- {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"},
+ {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"},
+ {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"},
+ {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"},
+ {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"},
+ {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"},
+ {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"},
+ {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"},
+ {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"},
+ {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"},
+ {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"},
+ {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"},
+ {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"},
+ {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"},
]
[package.dependencies]
@@ -1707,13 +1710,13 @@ files = [
[[package]]
name = "requests"
-version = "2.31.0"
+version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
- {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]
[package.dependencies]
@@ -1809,13 +1812,13 @@ pytest = ">=7.0.0,<9.0.0"
[[package]]
name = "tenacity"
-version = "8.3.0"
+version = "8.4.1"
description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.8"
files = [
- {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"},
- {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"},
+ {file = "tenacity-8.4.1-py3-none-any.whl", hash = "sha256:28522e692eda3e1b8f5e99c51464efcc0b9fc86933da92415168bc1c4e2308fa"},
+ {file = "tenacity-8.4.1.tar.gz", hash = "sha256:54b1412b878ddf7e1f1577cd49527bad8cdef32421bd599beac0c6c3f10582fd"},
]
[package.extras]
@@ -1887,22 +1890,22 @@ files = [
[[package]]
name = "tornado"
-version = "6.4"
+version = "6.4.1"
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
optional = false
-python-versions = ">= 3.8"
+python-versions = ">=3.8"
files = [
- {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"},
- {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"},
- {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"},
- {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"},
- {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"},
- {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"},
- {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"},
- {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"},
- {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"},
- {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"},
- {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"},
+ {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"},
+ {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"},
+ {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"},
+ {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"},
+ {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"},
+ {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"},
+ {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"},
+ {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"},
+ {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"},
+ {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"},
+ {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"},
]
[[package]]
@@ -1942,13 +1945,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,
[[package]]
name = "types-requests"
-version = "2.31.0.20240406"
+version = "2.32.0.20240622"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"},
- {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"},
+ {file = "types-requests-2.32.0.20240622.tar.gz", hash = "sha256:ed5e8a412fcc39159d6319385c009d642845f250c63902718f605cd90faade31"},
+ {file = "types_requests-2.32.0.20240622-py3-none-any.whl", hash = "sha256:97bac6b54b5bd4cf91d407e62f0932a74821bc2211f22116d9ee1dd643826caf"},
]
[package.dependencies]
@@ -1956,24 +1959,24 @@ urllib3 = ">=2"
[[package]]
name = "typing-extensions"
-version = "4.11.0"
+version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
- {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "urllib3"
-version = "2.2.1"
+version = "2.2.2"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
- {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
- {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
+ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+ {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
]
[package.extras]
@@ -1984,40 +1987,43 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "watchdog"
-version = "4.0.0"
+version = "4.0.1"
description = "Filesystem events monitoring"
optional = false
python-versions = ">=3.8"
files = [
- {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"},
- {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"},
- {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"},
- {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"},
- {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"},
- {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"},
- {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"},
- {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"},
- {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"},
- {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"},
+ {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"},
+ {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"},
+ {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"},
+ {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"},
+ {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"},
+ {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"},
+ {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"},
+ {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"},
+ {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"},
+ {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"},
+ {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"},
+ {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"},
+ {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"},
+ {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"},
+ {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"},
+ {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"},
+ {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"},
+ {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"},
+ {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"},
+ {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"},
+ {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"},
+ {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"},
+ {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"},
+ {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"},
+ {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"},
]
[package.extras]
@@ -2036,18 +2042,18 @@ files = [
[[package]]
name = "zipp"
-version = "3.18.1"
+version = "3.19.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
- {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
+ {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
+ {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[metadata]
lock-version = "2.0"
diff --git a/libs/partners/azure-dynamic-sessions/pyproject.toml b/libs/partners/azure-dynamic-sessions/pyproject.toml
index a1ead73f684ba..d9ffba94823e3 100644
--- a/libs/partners/azure-dynamic-sessions/pyproject.toml
+++ b/libs/partners/azure-dynamic-sessions/pyproject.toml
@@ -64,13 +64,21 @@ ipykernel = "^6.29.4"
langchain-openai = { path = "../openai", develop = true }
langchainhub = "^0.1.15"
-[tool.ruff]
+[tool.ruff.lint]
select = [
"E", # pycodestyle
"F", # pyflakes
"I", # isort
+ "D", # pydocstyle
+
]
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.lint.per-file-ignores]
+"tests/**" = ["D"] # ignore docstring checks for tests
+
[tool.mypy]
disallow_untyped_defs = "True"
diff --git a/libs/partners/azure-dynamic-sessions/scripts/check_imports.py b/libs/partners/azure-dynamic-sessions/scripts/check_imports.py
index fd21a4975b7f0..336c05e5282f2 100644
--- a/libs/partners/azure-dynamic-sessions/scripts/check_imports.py
+++ b/libs/partners/azure-dynamic-sessions/scripts/check_imports.py
@@ -1,3 +1,5 @@
+"""This module checks for specific import statements in the codebase."""
+
import sys
import traceback
from importlib.machinery import SourceFileLoader
diff --git a/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py b/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py
index 38c3a9271e3cd..d50538b3c0ac8 100644
--- a/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py
+++ b/libs/partners/mongodb/langchain_mongodb/chat_message_histories.py
@@ -1,6 +1,6 @@
import json
import logging
-from typing import List
+from typing import Dict, List, Optional
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.messages import (
@@ -14,6 +14,8 @@
DEFAULT_DBNAME = "chat_history"
DEFAULT_COLLECTION_NAME = "message_store"
+DEFAULT_SESSION_ID_KEY = "SessionId"
+DEFAULT_HISTORY_KEY = "History"
class MongoDBChatMessageHistory(BaseChatMessageHistory):
@@ -25,6 +27,10 @@ class MongoDBChatMessageHistory(BaseChatMessageHistory):
of a single chat session.
database_name: name of the database to use
collection_name: name of the collection to use
+ session_id_key: name of the field that stores the session id
+ history_key: name of the field that stores the chat history
+ create_index: whether to create an index on the session id field
+ index_kwargs: additional keyword arguments to pass to the index creation
"""
def __init__(
@@ -33,11 +39,18 @@ def __init__(
session_id: str,
database_name: str = DEFAULT_DBNAME,
collection_name: str = DEFAULT_COLLECTION_NAME,
+ *,
+ session_id_key: str = DEFAULT_SESSION_ID_KEY,
+ history_key: str = DEFAULT_HISTORY_KEY,
+ create_index: bool = True,
+ index_kwargs: Optional[Dict] = None,
):
self.connection_string = connection_string
self.session_id = session_id
self.database_name = database_name
self.collection_name = collection_name
+ self.session_id_key = session_id_key
+ self.history_key = history_key
try:
self.client: MongoClient = MongoClient(connection_string)
@@ -46,18 +59,21 @@ def __init__(
self.db = self.client[database_name]
self.collection = self.db[collection_name]
- self.collection.create_index("SessionId")
+
+ if create_index:
+ index_kwargs = index_kwargs or {}
+ self.collection.create_index(self.session_id_key, **index_kwargs)
@property
def messages(self) -> List[BaseMessage]: # type: ignore
"""Retrieve the messages from MongoDB"""
try:
- cursor = self.collection.find({"SessionId": self.session_id})
+ cursor = self.collection.find({self.session_id_key: self.session_id})
except errors.OperationFailure as error:
logger.error(error)
if cursor:
- items = [json.loads(document["History"]) for document in cursor]
+ items = [json.loads(document[self.history_key]) for document in cursor]
else:
items = []
@@ -69,8 +85,8 @@ def add_message(self, message: BaseMessage) -> None:
try:
self.collection.insert_one(
{
- "SessionId": self.session_id,
- "History": json.dumps(message_to_dict(message)),
+ self.session_id_key: self.session_id,
+ self.history_key: json.dumps(message_to_dict(message)),
}
)
except errors.WriteError as err:
@@ -79,6 +95,6 @@ def add_message(self, message: BaseMessage) -> None:
def clear(self) -> None:
"""Clear session memory from MongoDB"""
try:
- self.collection.delete_many({"SessionId": self.session_id})
+ self.collection.delete_many({self.session_id_key: self.session_id})
except errors.WriteError as err:
logger.error(err)
diff --git a/libs/partners/mongodb/poetry.lock b/libs/partners/mongodb/poetry.lock
index 6dd7637ccd0a7..d4a9c9e68e00c 100644
--- a/libs/partners/mongodb/poetry.lock
+++ b/libs/partners/mongodb/poetry.lock
@@ -546,7 +546,7 @@ files = [
[[package]]
name = "langchain"
-version = "0.2.3"
+version = "0.2.5"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -556,7 +556,7 @@ develop = true
[package.dependencies]
aiohttp = "^3.8.3"
async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""}
-langchain-core = "^0.2.0"
+langchain-core = "^0.2.7"
langchain-text-splitters = "^0.2.0"
langsmith = "^0.1.17"
numpy = [
@@ -567,7 +567,7 @@ pydantic = ">=1,<3"
PyYAML = ">=5.3"
requests = "^2"
SQLAlchemy = ">=1.4,<3"
-tenacity = "^8.1.0"
+tenacity = "^8.1.0,!=8.4.0"
[package.source]
type = "directory"
@@ -575,7 +575,7 @@ url = "../../langchain"
[[package]]
name = "langchain-core"
-version = "0.2.5"
+version = "0.2.9"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -586,9 +586,12 @@ develop = true
jsonpatch = "^1.33"
langsmith = "^0.1.75"
packaging = ">=23.2,<25"
-pydantic = ">=1,<3"
+pydantic = [
+ {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
+ {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
+]
PyYAML = ">=5.3"
-tenacity = "^8.1.0"
+tenacity = "^8.1.0,!=8.4.0"
[package.source]
type = "directory"
@@ -968,6 +971,25 @@ typing-extensions = ">=4.6.1"
[package.extras]
email = ["email-validator (>=2.0.0)"]
+[[package]]
+name = "pydantic"
+version = "2.7.4"
+description = "Data validation using Python type hints"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"},
+ {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"},
+]
+
+[package.dependencies]
+annotated-types = ">=0.4.0"
+pydantic-core = "2.18.4"
+typing-extensions = ">=4.6.1"
+
+[package.extras]
+email = ["email-validator (>=2.0.0)"]
+
[[package]]
name = "pydantic-core"
version = "2.18.2"
@@ -1059,6 +1081,97 @@ files = [
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+[[package]]
+name = "pydantic-core"
+version = "2.18.4"
+description = "Core functionality for Pydantic validation and serialization"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"},
+ {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"},
+ {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"},
+ {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"},
+ {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"},
+ {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"},
+ {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"},
+ {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"},
+ {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"},
+ {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"},
+ {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"},
+ {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"},
+ {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"},
+ {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"},
+ {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"},
+ {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"},
+ {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"},
+ {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"},
+ {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"},
+ {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"},
+ {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+
[[package]]
name = "pymongo"
version = "4.7.2"
diff --git a/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py b/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py
index 2c1889a43ad49..9c89c6d4243d1 100644
--- a/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py
+++ b/libs/partners/mongodb/tests/unit_tests/test_chat_message_histories.py
@@ -13,6 +13,8 @@ def __init__(self) -> None:
self.database_name = "test-database"
self.collection_name = "test-collection"
self.collection = MockCollection()
+ self.session_id_key = "SessionId"
+ self.history_key = "History"
def test_memory_with_message_store() -> None:
diff --git a/libs/partners/together/langchain_together/__init__.py b/libs/partners/together/langchain_together/__init__.py
index 2547fc80f7425..f4a23d35c0f44 100644
--- a/libs/partners/together/langchain_together/__init__.py
+++ b/libs/partners/together/langchain_together/__init__.py
@@ -1,3 +1,5 @@
+"""This package provides the Together integration for LangChain."""
+
from langchain_together.chat_models import ChatTogether
from langchain_together.embeddings import TogetherEmbeddings
from langchain_together.llms import Together
diff --git a/libs/partners/together/langchain_together/chat_models.py b/libs/partners/together/langchain_together/chat_models.py
index 0ecf9fd57b5ed..a3c5d604a6217 100644
--- a/libs/partners/together/langchain_together/chat_models.py
+++ b/libs/partners/together/langchain_together/chat_models.py
@@ -35,14 +35,24 @@ class ChatTogether(BaseChatOpenAI):
@property
def lc_secrets(self) -> Dict[str, str]:
+ """A map of constructor argument names to secret ids.
+
+ For example,
+ {"together_api_key": "TOGETHER_API_KEY"}
+ """
return {"together_api_key": "TOGETHER_API_KEY"}
@classmethod
def get_lc_namespace(cls) -> List[str]:
+ """Get the namespace of the langchain object."""
return ["langchain", "chat_models", "together"]
@property
def lc_attributes(self) -> Dict[str, Any]:
+ """List of attribute names that should be included in the serialized kwargs.
+
+ These attributes must be accepted by the constructor.
+ """
attributes: Dict[str, Any] = {}
if self.together_api_base:
diff --git a/libs/partners/together/langchain_together/embeddings.py b/libs/partners/together/langchain_together/embeddings.py
index b3dfb337b00a1..c19a76e1a063d 100644
--- a/libs/partners/together/langchain_together/embeddings.py
+++ b/libs/partners/together/langchain_together/embeddings.py
@@ -111,6 +111,8 @@ class TogetherEmbeddings(BaseModel, Embeddings):
http_client as well if you'd like a custom client for sync invocations."""
class Config:
+ """Configuration for this pydantic object."""
+
extra = Extra.forbid
allow_population_by_field_name = True
@@ -143,7 +145,6 @@ def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
-
together_api_key = get_from_dict_or_env(
values, "together_api_key", "TOGETHER_API_KEY"
)
diff --git a/libs/partners/together/langchain_together/llms.py b/libs/partners/together/langchain_together/llms.py
index 3daafc67691e2..ed0bebe96db3e 100644
--- a/libs/partners/together/langchain_together/llms.py
+++ b/libs/partners/together/langchain_together/llms.py
@@ -85,8 +85,8 @@ def validate_environment(cls, values: Dict) -> Dict:
@root_validator()
def validate_max_tokens(cls, values: Dict) -> Dict:
- """
- The v1 completions endpoint, has max_tokens as required parameter.
+ """The v1 completions endpoint, has max_tokens as required parameter.
+
Set a default value and warn if the parameter is missing.
"""
if values.get("max_tokens") is None:
@@ -108,6 +108,11 @@ def _format_output(self, output: dict) -> str:
@property
def default_params(self) -> Dict[str, Any]:
+ """Return the default parameters for the Together model.
+
+ Returns:
+ A dictionary containing the default parameters.
+ """
return {
"model": self.model,
"temperature": self.temperature,
@@ -128,11 +133,13 @@ def _call(
Args:
prompt: The prompt to pass into the model.
+ stop: Optional list of stop words to use when generating.
+ run_manager: The CallbackManager for LLM run, it's not used at the moment.
+ **kwargs: Additional parameters to pass to the model.
Returns:
The string generated by the model..
"""
-
headers = {
"Authorization": f"Bearer {self.together_api_key.get_secret_value()}",
"Content-Type": "application/json",
@@ -176,6 +183,9 @@ async def _acall(
Args:
prompt: The prompt to pass into the model.
+ stop: Optional list of stop words to use when generating.
+ run_manager: The CallbackManager for LLM run, it's not used at the moment.
+ **kwargs: Additional parameters to pass to the model.
Returns:
The string generated by the model.
diff --git a/libs/partners/together/poetry.lock b/libs/partners/together/poetry.lock
index 10dc66544ab63..66a232f43ac39 100644
--- a/libs/partners/together/poetry.lock
+++ b/libs/partners/together/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "aiohttp"
@@ -121,15 +121,18 @@ files = [
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
+[package.dependencies]
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
+
[[package]]
name = "anyio"
-version = "4.3.0"
+version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
- {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
- {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
+ {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+ {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
]
[package.dependencies]
@@ -175,13 +178,13 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p
[[package]]
name = "certifi"
-version = "2024.2.2"
+version = "2024.6.2"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
- {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+ {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
+ {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
]
[[package]]
@@ -285,13 +288,13 @@ files = [
[[package]]
name = "codespell"
-version = "2.2.6"
+version = "2.3.0"
description = "Codespell"
optional = false
python-versions = ">=3.8"
files = [
- {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"},
- {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"},
+ {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"},
+ {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"},
]
[package.extras]
@@ -566,13 +569,13 @@ jsonpointer = ">=1.9"
[[package]]
name = "jsonpointer"
-version = "2.4"
+version = "3.0.0"
description = "Identify specific nodes in a JSON document (RFC 6901)"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
+python-versions = ">=3.7"
files = [
- {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"},
- {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"},
+ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
+ {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
]
[[package]]
@@ -601,7 +604,7 @@ url = "../../core"
[[package]]
name = "langchain-openai"
-version = "0.1.8"
+version = "0.1.9"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -637,13 +640,13 @@ url = "../../standard-tests"
[[package]]
name = "langsmith"
-version = "0.1.77"
+version = "0.1.81"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langsmith-0.1.77-py3-none-any.whl", hash = "sha256:2202cc21b1ed7e7b9e5d2af2694be28898afa048c09fdf09f620cbd9301755ae"},
- {file = "langsmith-0.1.77.tar.gz", hash = "sha256:4ace09077a9a4e412afeb4b517ca68e7de7b07f36e4792dc8236ac5207c0c0c7"},
+ {file = "langsmith-0.1.81-py3-none-any.whl", hash = "sha256:3251d823225eef23ee541980b9d9e506367eabbb7f985a086b5d09e8f78ba7e9"},
+ {file = "langsmith-0.1.81.tar.gz", hash = "sha256:585ef3a2251380bd2843a664c9a28da4a7d28432e3ee8bcebf291ffb8e1f0af0"},
]
[package.dependencies]
@@ -930,13 +933,13 @@ files = [
[[package]]
name = "openai"
-version = "1.30.1"
+version = "1.35.3"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-1.30.1-py3-none-any.whl", hash = "sha256:c9fb3c3545c118bbce8deb824397b9433a66d0d0ede6a96f7009c95b76de4a46"},
- {file = "openai-1.30.1.tar.gz", hash = "sha256:4f85190e577cba0b066e1950b8eb9b11d25bc7ebcc43a86b326ce1bfa564ec74"},
+ {file = "openai-1.35.3-py3-none-any.whl", hash = "sha256:7b26544cef80f125431c073ffab3811d2421fbb9e30d3bd5c2436aba00b042d5"},
+ {file = "openai-1.35.3.tar.gz", hash = "sha256:d6177087f150b381d49499be782d764213fdf638d391b29ca692b84dd675a389"},
]
[package.dependencies]
@@ -953,68 +956,68 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
[[package]]
name = "orjson"
-version = "3.10.3"
+version = "3.10.5"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
- {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"},
- {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"},
- {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"},
- {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"},
- {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"},
- {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"},
- {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"},
- {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"},
- {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"},
- {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"},
- {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"},
- {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"},
- {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"},
- {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"},
- {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"},
- {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"},
- {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"},
- {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"},
- {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"},
- {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"},
- {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"},
- {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"},
- {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"},
- {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"},
- {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"},
- {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"},
- {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"},
- {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"},
- {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"},
- {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"},
- {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"},
+ {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"},
+ {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"},
+ {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"},
+ {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"},
+ {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"},
+ {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"},
+ {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"},
+ {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"},
+ {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"},
+ {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"},
+ {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"},
+ {file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"},
+ {file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"},
+ {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"},
+ {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"},
+ {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"},
+ {file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"},
+ {file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"},
+ {file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"},
+ {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"},
+ {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"},
+ {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"},
+ {file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"},
+ {file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"},
+ {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"},
+ {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"},
+ {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"},
+ {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"},
+ {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"},
+ {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"},
+ {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"},
]
[[package]]
name = "packaging"
-version = "23.2"
+version = "24.1"
description = "Core utilities for Python packages"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
- {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
@@ -1032,58 +1035,6 @@ files = [
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
-[[package]]
-name = "pydantic"
-version = "1.10.15"
-description = "Data validation and settings management using python type hints"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"},
- {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"},
- {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"},
- {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"},
- {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"},
- {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"},
- {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"},
- {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"},
- {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"},
- {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"},
- {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"},
- {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"},
- {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"},
- {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"},
- {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"},
- {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"},
- {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"},
- {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"},
- {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"},
- {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"},
- {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"},
- {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"},
- {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"},
- {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"},
- {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"},
- {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"},
- {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"},
- {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"},
- {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"},
- {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"},
- {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"},
- {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"},
- {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"},
- {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"},
- {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"},
- {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.2.0"
-
-[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
-
[[package]]
name = "pydantic"
version = "2.7.4"
@@ -1444,13 +1395,13 @@ files = [
[[package]]
name = "requests"
-version = "2.31.0"
+version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
- {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]
[package.dependencies]
@@ -1546,13 +1497,13 @@ pytest = ">=7.0.0,<9.0.0"
[[package]]
name = "tenacity"
-version = "8.3.0"
+version = "8.4.1"
description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.8"
files = [
- {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"},
- {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"},
+ {file = "tenacity-8.4.1-py3-none-any.whl", hash = "sha256:28522e692eda3e1b8f5e99c51464efcc0b9fc86933da92415168bc1c4e2308fa"},
+ {file = "tenacity-8.4.1.tar.gz", hash = "sha256:54b1412b878ddf7e1f1577cd49527bad8cdef32421bd599beac0c6c3f10582fd"},
]
[package.extras]
@@ -1644,13 +1595,13 @@ telegram = ["requests"]
[[package]]
name = "types-requests"
-version = "2.31.0.20240406"
+version = "2.32.0.20240622"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"},
- {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"},
+ {file = "types-requests-2.32.0.20240622.tar.gz", hash = "sha256:ed5e8a412fcc39159d6319385c009d642845f250c63902718f605cd90faade31"},
+ {file = "types_requests-2.32.0.20240622-py3-none-any.whl", hash = "sha256:97bac6b54b5bd4cf91d407e62f0932a74821bc2211f22116d9ee1dd643826caf"},
]
[package.dependencies]
@@ -1658,13 +1609,13 @@ urllib3 = ">=2"
[[package]]
name = "typing-extensions"
-version = "4.11.0"
+version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
- {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
@@ -1684,13 +1635,13 @@ typing-extensions = ">=3.7.4"
[[package]]
name = "urllib3"
-version = "2.2.1"
+version = "2.2.2"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
- {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
- {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
+ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+ {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
]
[package.extras]
@@ -1701,40 +1652,43 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "watchdog"
-version = "4.0.0"
+version = "4.0.1"
description = "Filesystem events monitoring"
optional = false
python-versions = ">=3.8"
files = [
- {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"},
- {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"},
- {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"},
- {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"},
- {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"},
- {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"},
- {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"},
- {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"},
- {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"},
- {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"},
- {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"},
- {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"},
- {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"},
- {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"},
- {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"},
+ {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"},
+ {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"},
+ {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"},
+ {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"},
+ {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"},
+ {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"},
+ {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"},
+ {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"},
+ {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"},
+ {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"},
+ {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"},
+ {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"},
+ {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"},
+ {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"},
+ {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"},
+ {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"},
+ {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"},
+ {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"},
+ {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"},
+ {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"},
+ {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"},
+ {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"},
+ {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"},
+ {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"},
+ {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"},
+ {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"},
]
[package.extras]
diff --git a/libs/partners/together/pyproject.toml b/libs/partners/together/pyproject.toml
index 0edea39fa984f..2f9caf3ed700c 100644
--- a/libs/partners/together/pyproject.toml
+++ b/libs/partners/together/pyproject.toml
@@ -69,13 +69,20 @@ optional = true
[tool.poetry.group.dev.dependencies]
langchain-core = { path = "../../core", develop = true }
-[tool.ruff]
+[tool.ruff.lint]
select = [
"E", # pycodestyle
"F", # pyflakes
"I", # isort
+ "D", # pydocstyle
]
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.lint.per-file-ignores]
+"tests/**" = ["D"] # ignore docstring checks for tests
+
[tool.mypy]
disallow_untyped_defs = "True"
diff --git a/libs/partners/together/scripts/check_imports.py b/libs/partners/together/scripts/check_imports.py
index fd21a4975b7f0..aa917add8d372 100644
--- a/libs/partners/together/scripts/check_imports.py
+++ b/libs/partners/together/scripts/check_imports.py
@@ -1,3 +1,5 @@
+"""This module checks if the given python files can be imported without error."""
+
import sys
import traceback
from importlib.machinery import SourceFileLoader
diff --git a/libs/standard-tests/langchain_standard_tests/integration_tests/base_store.py b/libs/standard-tests/langchain_standard_tests/integration_tests/base_store.py
new file mode 100644
index 0000000000000..8f74d066a45d6
--- /dev/null
+++ b/libs/standard-tests/langchain_standard_tests/integration_tests/base_store.py
@@ -0,0 +1,276 @@
+from abc import ABC, abstractmethod
+from typing import AsyncGenerator, Generator, Generic, Tuple, TypeVar
+
+import pytest
+from langchain_core.stores import BaseStore
+
+V = TypeVar("V")
+
+
+class BaseStoreSyncTests(ABC, Generic[V]):
+ """Test suite for checking the key-value API of a BaseStore.
+
+ This test suite verifies the basic key-value API of a BaseStore.
+
+ The test suite is designed for synchronous key-value stores.
+
+ Implementers should subclass this test suite and provide a fixture
+ that returns an empty key-value store for each test.
+ """
+
+ @abstractmethod
+ @pytest.fixture
+ def kv_store(self) -> BaseStore[str, V]:
+ """Get the key-value store class to test.
+
+ The returned key-value store should be EMPTY.
+ """
+
+ @abstractmethod
+ @pytest.fixture()
+ def three_values(self) -> Tuple[V, V, V]:
+ """Thee example values that will be used in the tests."""
+ pass
+
+ def test_three_values(self, three_values: Tuple[V, V, V]) -> None:
+ """Test that the fixture provides three values."""
+ assert isinstance(three_values, tuple)
+ assert len(three_values) == 3
+
+ def test_kv_store_is_empty(self, kv_store: BaseStore[str, V]) -> None:
+ """Test that the key-value store is empty."""
+ keys = ["foo", "bar", "buzz"]
+ assert kv_store.mget(keys) == [None, None, None]
+
+ def test_set_and_get_values(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test setting and getting values in the key-value store."""
+ foo = three_values[0]
+ bar = three_values[1]
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ kv_store.mset(key_value_pairs)
+ assert kv_store.mget(["foo", "bar"]) == [foo, bar]
+
+ def test_store_still_empty(self, kv_store: BaseStore[str, V]) -> None:
+ """This test should follow a test that sets values.
+
+ This just verifies that the fixture is set up properly to be empty
+ after each test.
+ """
+ keys = ["foo"]
+ assert kv_store.mget(keys) == [None]
+
+ def test_delete_values(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test deleting values from the key-value store."""
+ foo = three_values[0]
+ bar = three_values[1]
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ kv_store.mset(key_value_pairs)
+ kv_store.mdelete(["foo"])
+ assert kv_store.mget(["foo", "bar"]) == [None, bar]
+
+ def test_delete_bulk_values(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that we can delete several values at once."""
+ foo, bar, buz = three_values
+ key_values = [("foo", foo), ("bar", bar), ("buz", buz)]
+ kv_store.mset(key_values)
+ kv_store.mdelete(["foo", "buz"])
+ assert kv_store.mget(["foo", "bar", "buz"]) == [None, bar, None]
+
+ def test_delete_missing_keys(self, kv_store: BaseStore[str, V]) -> None:
+ """Deleting missing keys should not raise an exception."""
+ kv_store.mdelete(["foo"])
+ kv_store.mdelete(["foo", "bar", "baz"])
+
+ def test_set_values_is_idempotent(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Setting values by key should be idempotent."""
+ foo, bar, _ = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ kv_store.mset(key_value_pairs)
+ kv_store.mset(key_value_pairs)
+ assert kv_store.mget(["foo", "bar"]) == [foo, bar]
+ assert sorted(kv_store.yield_keys()) == ["bar", "foo"]
+
+ def test_get_can_get_same_value(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that the same value can be retrieved multiple times."""
+ foo, bar, _ = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ kv_store.mset(key_value_pairs)
+ # This test assumes kv_store does not handle duplicates by default
+ assert kv_store.mget(["foo", "bar", "foo", "bar"]) == [foo, bar, foo, bar]
+
+ def test_overwrite_values_by_key(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that we can overwrite values by key using mset."""
+ foo, bar, buzz = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ kv_store.mset(key_value_pairs)
+
+ # Now overwrite value of key "foo"
+ new_key_value_pairs = [("foo", buzz)]
+ kv_store.mset(new_key_value_pairs)
+
+ # Check that the value has been updated
+ assert kv_store.mget(["foo", "bar"]) == [buzz, bar]
+
+ def test_yield_keys(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that we can yield keys from the store."""
+ foo, bar, buzz = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ kv_store.mset(key_value_pairs)
+
+ generator = kv_store.yield_keys()
+ assert isinstance(generator, Generator)
+
+ assert sorted(kv_store.yield_keys()) == ["bar", "foo"]
+ assert sorted(kv_store.yield_keys(prefix="foo")) == ["foo"]
+
+
+class BaseStoreAsyncTests(ABC):
+ """Test suite for checking the key-value API of a BaseStore.
+
+ This test suite verifies the basic key-value API of a BaseStore.
+
+ The test suite is designed for asynchronous key-value stores.
+
+ Implementers should subclass this test suite and provide a fixture
+ that returns an empty key-value store for each test.
+ """
+
+ @abstractmethod
+ @pytest.fixture
+ async def kv_store(self) -> BaseStore[str, V]:
+ """Get the key-value store class to test.
+
+ The returned key-value store should be EMPTY.
+ """
+
+ @abstractmethod
+ @pytest.fixture()
+ def three_values(self) -> Tuple[V, V, V]:
+ """Thee example values that will be used in the tests."""
+ pass
+
+ async def test_three_values(self, three_values: Tuple[V, V, V]) -> None:
+ """Test that the fixture provides three values."""
+ assert isinstance(three_values, tuple)
+ assert len(three_values) == 3
+
+ async def test_kv_store_is_empty(self, kv_store: BaseStore[str, V]) -> None:
+ """Test that the key-value store is empty."""
+ keys = ["foo", "bar", "buzz"]
+ assert await kv_store.amget(keys) == [None, None, None]
+
+ async def test_set_and_get_values(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test setting and getting values in the key-value store."""
+ foo = three_values[0]
+ bar = three_values[1]
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ await kv_store.amset(key_value_pairs)
+ assert await kv_store.amget(["foo", "bar"]) == [foo, bar]
+
+ async def test_store_still_empty(self, kv_store: BaseStore[str, V]) -> None:
+ """This test should follow a test that sets values.
+
+ This just verifies that the fixture is set up properly to be empty
+ after each test.
+ """
+ keys = ["foo"]
+ assert await kv_store.amget(keys) == [None]
+
+ async def test_delete_values(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test deleting values from the key-value store."""
+ foo = three_values[0]
+ bar = three_values[1]
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ await kv_store.amset(key_value_pairs)
+ await kv_store.amdelete(["foo"])
+ assert await kv_store.amget(["foo", "bar"]) == [None, bar]
+
+ async def test_delete_bulk_values(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that we can delete several values at once."""
+ foo, bar, buz = three_values
+ key_values = [("foo", foo), ("bar", bar), ("buz", buz)]
+ await kv_store.amset(key_values)
+ await kv_store.amdelete(["foo", "buz"])
+ assert await kv_store.amget(["foo", "bar", "buz"]) == [None, bar, None]
+
+ async def test_delete_missing_keys(self, kv_store: BaseStore[str, V]) -> None:
+ """Deleting missing keys should not raise an exception."""
+ await kv_store.amdelete(["foo"])
+ await kv_store.amdelete(["foo", "bar", "baz"])
+
+ async def test_set_values_is_idempotent(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Setting values by key should be idempotent."""
+ foo, bar, _ = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ await kv_store.amset(key_value_pairs)
+ await kv_store.amset(key_value_pairs)
+ assert await kv_store.amget(["foo", "bar"]) == [foo, bar]
+ assert sorted([key async for key in kv_store.ayield_keys()]) == ["bar", "foo"]
+
+ async def test_get_can_get_same_value(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that the same value can be retrieved multiple times."""
+ foo, bar, _ = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ await kv_store.amset(key_value_pairs)
+ # This test assumes kv_store does not handle duplicates by default
+ assert await kv_store.amget(["foo", "bar", "foo", "bar"]) == [
+ foo,
+ bar,
+ foo,
+ bar,
+ ]
+
+ async def test_overwrite_values_by_key(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that we can overwrite values by key using mset."""
+ foo, bar, buzz = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ await kv_store.amset(key_value_pairs)
+
+ # Now overwrite value of key "foo"
+ new_key_value_pairs = [("foo", buzz)]
+ await kv_store.amset(new_key_value_pairs)
+
+ # Check that the value has been updated
+ assert await kv_store.amget(["foo", "bar"]) == [buzz, bar]
+
+ async def test_yield_keys(
+ self, kv_store: BaseStore[str, V], three_values: Tuple[V, V, V]
+ ) -> None:
+ """Test that we can yield keys from the store."""
+ foo, bar, buzz = three_values
+ key_value_pairs = [("foo", foo), ("bar", bar)]
+ await kv_store.amset(key_value_pairs)
+
+ generator = kv_store.ayield_keys()
+ assert isinstance(generator, AsyncGenerator)
+
+ assert sorted([key async for key in kv_store.ayield_keys()]) == ["bar", "foo"]
+ assert sorted([key async for key in kv_store.ayield_keys(prefix="foo")]) == [
+ "foo"
+ ]
diff --git a/libs/standard-tests/langchain_standard_tests/integration_tests/vectorstores.py b/libs/standard-tests/langchain_standard_tests/integration_tests/vectorstores.py
new file mode 100644
index 0000000000000..d65eb12934947
--- /dev/null
+++ b/libs/standard-tests/langchain_standard_tests/integration_tests/vectorstores.py
@@ -0,0 +1,301 @@
+"""Test suite to test vectostores."""
+from abc import ABC, abstractmethod
+
+import pytest
+from langchain_core.documents import Document
+from langchain_core.embeddings.fake import DeterministicFakeEmbedding, Embeddings
+from langchain_core.vectorstores import VectorStore
+
+# Arbitrarily chosen. Using a small embedding size
+# so tests are faster and easier to debug.
+EMBEDDING_SIZE = 6
+
+
+class ReadWriteTestSuite(ABC):
+ """Test suite for checking the read-write API of a vectorstore.
+
+ This test suite verifies the basic read-write API of a vectorstore.
+
+ The test suite is designed for synchronous vectorstores.
+
+ Implementers should subclass this test suite and provide a fixture
+ that returns an empty vectorstore for each test.
+
+ The fixture should use the `get_embeddings` method to get a pre-defined
+ embeddings model that should be used for this test suite.
+ """
+
+ @abstractmethod
+ @pytest.fixture
+ def vectorstore(self) -> VectorStore:
+ """Get the vectorstore class to test.
+
+ The returned vectorstore should be EMPTY.
+ """
+
+ @staticmethod
+ def get_embeddings() -> Embeddings:
+ """A pre-defined embeddings model that should be used for this test."""
+ return DeterministicFakeEmbedding(
+ size=EMBEDDING_SIZE,
+ )
+
+ def test_vectorstore_is_empty(self, vectorstore: VectorStore) -> None:
+ """Test that the vectorstore is empty."""
+ assert vectorstore.similarity_search("foo", k=1) == []
+
+ def test_add_documents(self, vectorstore: VectorStore) -> None:
+ """Test adding documents into the vectorstore."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+ vectorstore.add_documents(documents)
+ documents = vectorstore.similarity_search("bar", k=2)
+ assert documents == [
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="foo", metadata={"id": 1}),
+ ]
+
+ def test_vectorstore_still_empty(self, vectorstore: VectorStore) -> None:
+ """This test should follow a test that adds documents.
+
+ This just verifies that the fixture is set up properly to be empty
+ after each test.
+ """
+ assert vectorstore.similarity_search("foo", k=1) == []
+
+ def test_deleting_documents(self, vectorstore: VectorStore) -> None:
+ """Test deleting documents from the vectorstore."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+ vectorstore.add_documents(documents, ids=["1", "2"])
+ vectorstore.delete(["1"])
+ documents = vectorstore.similarity_search("foo", k=1)
+ assert documents == [Document(page_content="bar", metadata={"id": 2})]
+
+ def test_deleting_bulk_documents(self, vectorstore: VectorStore) -> None:
+ """Test that we can delete several documents at once."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="baz", metadata={"id": 3}),
+ ]
+
+ vectorstore.add_documents(documents, ids=["1", "2", "3"])
+ vectorstore.delete(["1", "2"])
+ documents = vectorstore.similarity_search("foo", k=1)
+ assert documents == [Document(page_content="baz", metadata={"id": 3})]
+
+ def test_delete_missing_content(self, vectorstore: VectorStore) -> None:
+ """Deleting missing content should not raise an exception."""
+ vectorstore.delete(["1"])
+ vectorstore.delete(["1", "2", "3"])
+
+ def test_add_documents_with_ids_is_idempotent(
+ self, vectorstore: VectorStore
+ ) -> None:
+ """Adding by ID should be idempotent."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+ vectorstore.add_documents(documents, ids=["1", "2"])
+ vectorstore.add_documents(documents, ids=["1", "2"])
+ documents = vectorstore.similarity_search("bar", k=2)
+ assert documents == [
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="foo", metadata={"id": 1}),
+ ]
+
+ def test_add_documents_without_ids_gets_duplicated(
+ self, vectorstore: VectorStore
+ ) -> None:
+ """Adding documents without specifying IDs should duplicate content."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+
+ vectorstore.add_documents(documents)
+ vectorstore.add_documents(documents)
+ documents = vectorstore.similarity_search("bar", k=2)
+ assert documents == [
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+
+ def test_add_documents_by_id_with_mutation(self, vectorstore: VectorStore) -> None:
+ """Test that we can overwrite by ID using add_documents."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+
+ vectorstore.add_documents(documents=documents, ids=["1", "2"])
+
+ # Now over-write content of ID 1
+ new_documents = [
+ Document(
+ page_content="new foo", metadata={"id": 1, "some_other_field": "foo"}
+ ),
+ ]
+
+ vectorstore.add_documents(documents=new_documents, ids=["1"])
+
+ # Check that the content has been updated
+ documents = vectorstore.similarity_search("new foo", k=2)
+ assert documents == [
+ Document(
+ page_content="new foo", metadata={"id": 1, "some_other_field": "foo"}
+ ),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+
+
+class AsyncReadWriteTestSuite(ABC):
+ """Test suite for checking the **async** read-write API of a vectorstore.
+
+ This test suite verifies the basic read-write API of a vectorstore.
+
+ The test suite is designed for asynchronous vectorstores.
+
+ Implementers should subclass this test suite and provide a fixture
+ that returns an empty vectorstore for each test.
+
+ The fixture should use the `get_embeddings` method to get a pre-defined
+ embeddings model that should be used for this test suite.
+ """
+
+ @abstractmethod
+ @pytest.fixture
+ async def vectorstore(self) -> VectorStore:
+ """Get the vectorstore class to test.
+
+ The returned vectorstore should be EMPTY.
+ """
+
+ @staticmethod
+ def get_embeddings() -> Embeddings:
+ """A pre-defined embeddings model that should be used for this test."""
+ return DeterministicFakeEmbedding(
+ size=EMBEDDING_SIZE,
+ )
+
+ async def test_vectorstore_is_empty(self, vectorstore: VectorStore) -> None:
+ """Test that the vectorstore is empty."""
+ assert await vectorstore.asimilarity_search("foo", k=1) == []
+
+ async def test_add_documents(self, vectorstore: VectorStore) -> None:
+ """Test adding documents into the vectorstore."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+ await vectorstore.aadd_documents(documents)
+ documents = await vectorstore.asimilarity_search("bar", k=2)
+ assert documents == [
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="foo", metadata={"id": 1}),
+ ]
+
+ async def test_vectorstore_still_empty(self, vectorstore: VectorStore) -> None:
+ """This test should follow a test that adds documents.
+
+ This just verifies that the fixture is set up properly to be empty
+ after each test.
+ """
+ assert await vectorstore.asimilarity_search("foo", k=1) == []
+
+ async def test_deleting_documents(self, vectorstore: VectorStore) -> None:
+ """Test deleting documents from the vectorstore."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+ await vectorstore.aadd_documents(documents, ids=["1", "2"])
+ await vectorstore.adelete(["1"])
+ documents = await vectorstore.asimilarity_search("foo", k=1)
+ assert documents == [Document(page_content="bar", metadata={"id": 2})]
+
+ async def test_deleting_bulk_documents(self, vectorstore: VectorStore) -> None:
+ """Test that we can delete several documents at once."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="baz", metadata={"id": 3}),
+ ]
+
+ await vectorstore.aadd_documents(documents, ids=["1", "2", "3"])
+ await vectorstore.adelete(["1", "2"])
+ documents = await vectorstore.asimilarity_search("foo", k=1)
+ assert documents == [Document(page_content="baz", metadata={"id": 3})]
+
+ async def test_delete_missing_content(self, vectorstore: VectorStore) -> None:
+ """Deleting missing content should not raise an exception."""
+ await vectorstore.adelete(["1"])
+ await vectorstore.adelete(["1", "2", "3"])
+
+ async def test_add_documents_with_ids_is_idempotent(
+ self, vectorstore: VectorStore
+ ) -> None:
+ """Adding by ID should be idempotent."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+ await vectorstore.aadd_documents(documents, ids=["1", "2"])
+ await vectorstore.aadd_documents(documents, ids=["1", "2"])
+ documents = await vectorstore.asimilarity_search("bar", k=2)
+ assert documents == [
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="foo", metadata={"id": 1}),
+ ]
+
+ async def test_add_documents_without_ids_gets_duplicated(
+ self, vectorstore: VectorStore
+ ) -> None:
+ """Adding documents without specifying IDs should duplicate content."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+
+ await vectorstore.aadd_documents(documents)
+ await vectorstore.aadd_documents(documents)
+ documents = await vectorstore.asimilarity_search("bar", k=2)
+ assert documents == [
+ Document(page_content="bar", metadata={"id": 2}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+
+ async def test_add_documents_by_id_with_mutation(
+ self, vectorstore: VectorStore
+ ) -> None:
+ """Test that we can overwrite by ID using add_documents."""
+ documents = [
+ Document(page_content="foo", metadata={"id": 1}),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
+
+ await vectorstore.aadd_documents(documents=documents, ids=["1", "2"])
+
+ # Now over-write content of ID 1
+ new_documents = [
+ Document(
+ page_content="new foo", metadata={"id": 1, "some_other_field": "foo"}
+ ),
+ ]
+
+ await vectorstore.aadd_documents(documents=new_documents, ids=["1"])
+
+ # Check that the content has been updated
+ documents = await vectorstore.asimilarity_search("new foo", k=2)
+ assert documents == [
+ Document(
+ page_content="new foo", metadata={"id": 1, "some_other_field": "foo"}
+ ),
+ Document(page_content="bar", metadata={"id": 2}),
+ ]
diff --git a/libs/standard-tests/tests/unit_tests/__init__.py b/libs/standard-tests/tests/unit_tests/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/libs/standard-tests/tests/unit_tests/test_in_memory_base_store.py b/libs/standard-tests/tests/unit_tests/test_in_memory_base_store.py
new file mode 100644
index 0000000000000..245b096554eae
--- /dev/null
+++ b/libs/standard-tests/tests/unit_tests/test_in_memory_base_store.py
@@ -0,0 +1,30 @@
+"""Tests for the InMemoryStore class."""
+from typing import Tuple
+
+import pytest
+from langchain_core.stores import InMemoryStore
+
+from langchain_standard_tests.integration_tests.base_store import (
+ BaseStoreAsyncTests,
+ BaseStoreSyncTests,
+)
+
+
+class TestInMemoryStore(BaseStoreSyncTests):
+ @pytest.fixture
+ def three_values(self) -> Tuple[str, str, str]:
+ return "foo", "bar", "buzz"
+
+ @pytest.fixture
+ def kv_store(self) -> InMemoryStore:
+ return InMemoryStore()
+
+
+class TestInMemoryStoreAsync(BaseStoreAsyncTests):
+ @pytest.fixture
+ def three_values(self) -> Tuple[str, str, str]: # type: ignore
+ return "foo", "bar", "buzz"
+
+ @pytest.fixture
+ async def kv_store(self) -> InMemoryStore:
+ return InMemoryStore()
diff --git a/libs/standard-tests/tests/unit_tests/test_in_memory_vectorstore.py b/libs/standard-tests/tests/unit_tests/test_in_memory_vectorstore.py
new file mode 100644
index 0000000000000..d34bf25c3881c
--- /dev/null
+++ b/libs/standard-tests/tests/unit_tests/test_in_memory_vectorstore.py
@@ -0,0 +1,28 @@
+import pytest
+from langchain_core.vectorstores import VectorStore
+
+from langchain_standard_tests.integration_tests.vectorstores import (
+ AsyncReadWriteTestSuite,
+ ReadWriteTestSuite,
+)
+
+# We'll need to move this dependency to core
+pytest.importorskip("langchain_community")
+
+from langchain_community.vectorstores.inmemory import ( # type: ignore # noqa
+ InMemoryVectorStore,
+)
+
+
+class TestInMemoryVectorStore(ReadWriteTestSuite):
+ @pytest.fixture
+ def vectorstore(self) -> VectorStore:
+ embeddings = self.get_embeddings()
+ return InMemoryVectorStore(embedding=embeddings)
+
+
+class TestAsyncInMemoryVectorStore(AsyncReadWriteTestSuite):
+ @pytest.fixture
+ async def vectorstore(self) -> VectorStore:
+ embeddings = self.get_embeddings()
+ return InMemoryVectorStore(embedding=embeddings)
diff --git a/pyproject.toml b/pyproject.toml
index 41a4f9edc8604..971c5b55a169b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -71,7 +71,7 @@ ignore-regex = '.*(Stati Uniti|Tense=Pres).*'
# whats is a typo but used frequently in queries so kept as is
# aapply - async apply
# unsecure - typo but part of API, decided to not bother for now
-ignore-words-list = 'momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin'
+ignore-words-list = 'momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin,cann'
[tool.ruff]
extend-include = ["*.ipynb"]