diff --git a/Dockerfile b/Dockerfile
index 1bb8f93a..1edde291 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,6 @@
FROM python:3.11-slim
WORKDIR /app
COPY . .
-RUN pip install flask praisonai==0.0.30 gunicorn markdown
+RUN pip install flask praisonai==0.0.31 gunicorn markdown
EXPOSE 8080
CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]
diff --git a/docs/api/praisonai/deploy.html b/docs/api/praisonai/deploy.html
index c17b49e9..23a7134c 100644
--- a/docs/api/praisonai/deploy.html
+++ b/docs/api/praisonai/deploy.html
@@ -84,7 +84,7 @@
Module praisonai.deploy
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==0.0.30 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==0.0.31 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
@@ -250,7 +250,7 @@ Raises
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==0.0.30 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==0.0.31 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
@@ -416,7 +416,7 @@ Raises
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==0.0.30 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==0.0.31 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
diff --git a/docs/create_custom_tools.md b/docs/create_custom_tools.md
deleted file mode 100644
index 93136e92..00000000
--- a/docs/create_custom_tools.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# Create Custom Tools
-## TL;DR to Create a Custom Tool
-
-```bash
-pip install praisonai duckduckgo-search
-export OPENAI_API_KEY="Enter your API key"
-praisonai --init research about the latest AI News and prepare a detailed report
-```
-
-- Add `- InternetSearchTool` in the agents.yaml file in the tools section.
-- Create a file called tools.py and add this code [tools.py](./tools.py)
-
-```bash
-praisonai
-```
-
-## Pre-requisite to Create a Custom Tool
-`agents.yaml` file should be present in the current directory.
-
-If it doesn't exist, create it by running the command `praisonai --init research about the latest AI News and prepare a detailed report`.
-
-### Step 1 to Create a Custom Tool
-
-Create a file called tools.py in the same directory as the agents.yaml file.
-
-```python
-# example tools.py
-from duckduckgo_search import DDGS
-from praisonai_tools import BaseTool
-
-class InternetSearchTool(BaseTool):
- name: str = "InternetSearchTool"
- description: str = "Search Internet for relevant information based on a query or latest news"
-
- def _run(self, query: str):
- ddgs = DDGS()
- results = ddgs.text(keywords=query, region='wt-wt', safesearch='moderate', max_results=5)
- return results
-```
-
-### Step 2 to Create a Custom Tool
-
-Add the tool to the agents.yaml file as show below under the tools section `- InternetSearchTool`.
-
-```yaml
-framework: crewai
-topic: research about the latest AI News and prepare a detailed report
-roles:
- research_analyst:
- backstory: Experienced in gathering and analyzing data related to AI news trends.
- goal: Analyze AI News trends
- role: Research Analyst
- tasks:
- gather_data:
- description: Conduct in-depth research on the latest AI News trends from reputable
- sources.
- expected_output: Comprehensive report on current AI News trends.
- tools:
- - InternetSearchTool
-```
diff --git a/docs/custom_tools.md b/docs/custom_tools.md
new file mode 100644
index 00000000..5f615216
--- /dev/null
+++ b/docs/custom_tools.md
@@ -0,0 +1,135 @@
+# Create Custom Tools
+
+This guide walks through installing PraisonAI and setting up a custom tool step by step.
+
+## Step 1: Install the `praisonai` Package
+
+First, you need to install the `praisonai` package. Open your terminal and run the following command:
+
+```bash
+pip install praisonai
+```
+
+## Step 2: Create the `InternetSearchTool`
+
+Next, create a file named `tools.py` and add the following code to define the `InternetSearchTool`:
+
+```python
+from duckduckgo_search import DDGS
+from praisonai_tools import BaseTool
+
+class InternetSearchTool(BaseTool):
+ name: str = "Internet Search Tool"
+ description: str = "Search Internet for relevant information based on a query or latest news"
+
+ def _run(self, query: str):
+ ddgs = DDGS()
+ results = ddgs.text(keywords=query, region='wt-wt', safesearch='moderate', max_results=5)
+ return results
+```
+
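+Before wiring the tool into an agent, you can sanity-check it on its own. This is a minimal sketch that calls `_run` directly (it assumes `tools.py` is in the current directory and `duckduckgo_search` is installed):
+
+```python
+# quick standalone test of the custom tool (illustrative only)
+from tools import InternetSearchTool
+
+tool = InternetSearchTool()
+# _run is called directly here for testing; the framework normally invokes it
+print(tool._run("latest AI news"))
+```
+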
+## Step 3: Define the Agent Configuration
+
+Create a file named `agents.yaml` and add the following content to configure the agent:
+
+```yaml
+framework: crewai
+topic: research about the causes of lung disease
+roles:
+ research_analyst:
+ backstory: Experienced in analyzing scientific data related to respiratory health.
+ goal: Analyze data on lung diseases
+ role: Research Analyst
+ tasks:
+ data_analysis:
+ description: Gather and analyze data on the causes and risk factors of lung diseases.
+ expected_output: Report detailing key findings on lung disease causes.
+ tools:
+ - InternetSearchTool
+```
+
+## Step 4: Run the PraisonAI Tool
+
+To run the PraisonAI tool, simply type the following command in your terminal:
+
+```bash
+praisonai
+```
+
+If you want to run the `autogen` framework, use:
+
+```bash
+praisonai --framework autogen
+```
+
+## Prerequisites
+
+Ensure you have the `duckduckgo_search` package installed. If not, you can install it using:
+
+```bash
+pip install duckduckgo_search
+```
+
+That's it! You should now have the PraisonAI tool installed and configured.
+
+## Other information
+
+### TL;DR to Create a Custom Tool
+
+```bash
+pip install praisonai duckduckgo-search
+export OPENAI_API_KEY="Enter your API key"
+praisonai --init research about the latest AI News and prepare a detailed report
+```
+
+- Add `- InternetSearchTool` to the tools section of the agents.yaml file.
+- Create a file called tools.py and add this code: [tools.py](./tools.py)
+
+```bash
+praisonai
+```
+
+### Pre-requisite to Create a Custom Tool
+The `agents.yaml` file should be present in the current directory.
+
+If it doesn't exist, create it by running the command `praisonai --init research about the latest AI News and prepare a detailed report`.
+
+#### Step 1 to Create a Custom Tool
+
+Create a file called tools.py in the same directory as the agents.yaml file.
+
+```python
+# example tools.py
+from duckduckgo_search import DDGS
+from praisonai_tools import BaseTool
+
+class InternetSearchTool(BaseTool):
+ name: str = "InternetSearchTool"
+ description: str = "Search Internet for relevant information based on a query or latest news"
+
+ def _run(self, query: str):
+ ddgs = DDGS()
+ results = ddgs.text(keywords=query, region='wt-wt', safesearch='moderate', max_results=5)
+ return results
+```
+
+#### Step 2 to Create a Custom Tool
+
+Add the tool to the agents.yaml file under the tools section, as shown below (`- InternetSearchTool`).
+
+```yaml
+framework: crewai
+topic: research about the latest AI News and prepare a detailed report
+roles:
+ research_analyst:
+ backstory: Experienced in gathering and analyzing data related to AI news trends.
+ goal: Analyze AI News trends
+ role: Research Analyst
+ tasks:
+ gather_data:
+ description: Conduct in-depth research on the latest AI News trends from reputable
+ sources.
+ expected_output: Comprehensive report on current AI News trends.
+ tools:
+ - InternetSearchTool
+```
diff --git a/docs/firecrawl.md b/docs/firecrawl.md
new file mode 100644
index 00000000..8e8dd448
--- /dev/null
+++ b/docs/firecrawl.md
@@ -0,0 +1,42 @@
+# Firecrawl PraisonAI Integration
+
+## Firecrawl running on localhost:3002
+
+```python
+from firecrawl import FirecrawlApp
+from praisonai_tools import BaseTool
+import re
+
+class WebPageScraperTool(BaseTool):
+ name: str = "Web Page Scraper Tool"
+ description: str = "Scrape and extract information from a given web page URL."
+
+ def _run(self, url: str) -> str:
+ app = FirecrawlApp(api_url='http://localhost:3002')
+ response = app.scrape_url(url=url)
+ content = response["content"]
+ # Remove all content above the line "========================================================"
+ if "========================================================" in content:
+ content = content.split("========================================================", 1)[1]
+
+ # Remove all menu items and similar patterns
+ content = re.sub(r'\*\s+\[.*?\]\(.*?\)', '', content)
+ content = re.sub(r'\[Skip to the content\]\(.*?\)', '', content)
+ content = re.sub(r'\[.*?\]\(.*?\)', '', content)
+ content = re.sub(r'\s*Menu\s*', '', content)
+ content = re.sub(r'\s*Search\s*', '', content)
+ content = re.sub(r'Categories\s*', '', content)
+
+ # Remove all URLs
+ content = re.sub(r'http\S+', '', content)
+
+ # Remove empty lines or lines with only whitespace
+ content = '\n'.join([line for line in content.split('\n') if line.strip()])
+
+ # Limit to the first 1000 words
+ words = content.split()
+ if len(words) > 1000:
+ content = ' '.join(words[:1000])
+
+ return content
+```
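+
+To call the scraper from an agent, reference it in `agents.yaml` like the other tools in these docs. A minimal sketch (the role and task names are illustrative):
+
+```yaml
+framework: crewai
+topic: summarise a web page
+roles:
+  summariser:
+    backstory: Skilled at distilling web content into concise summaries.
+    goal: Summarise the given web page
+    role: Summariser
+    tasks:
+      summarise_page:
+        description: Scrape the given URL and summarise the page content.
+        expected_output: A concise summary of the page.
+      tools:
+      - WebPageScraperTool
+```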
\ No newline at end of file
diff --git a/docs/langchain.md b/docs/langchain.md
new file mode 100644
index 00000000..7e6df3ff
--- /dev/null
+++ b/docs/langchain.md
@@ -0,0 +1,69 @@
+# Langchain Tools
+
+## Integrate Langchain Direct Tools
+
+```bash
+pip install youtube_search praisonai langchain_community langchain
+```
+
+```python
+# tools.py
+from langchain_community.tools import YouTubeSearchTool
+```
+
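+PraisonAI picks up the imported LangChain tool by its class name (see the agents.yaml below). To try the tool on its own first, you can run it directly; a minimal sketch (the trailing `,3` follows the tool's input format for limiting the number of results):
+
+```python
+from langchain_community.tools import YouTubeSearchTool
+
+tool = YouTubeSearchTool()
+# returns a string containing links to matching videos
+print(tool.run("causes of lung disease,3"))
+```
+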
+```yaml
+# agents.yaml
+framework: crewai
+topic: research about the causes of lung disease
+roles:
+ research_analyst:
+ backstory: Experienced in analyzing scientific data related to respiratory health.
+ goal: Analyze data on lung diseases
+ role: Research Analyst
+ tasks:
+ data_analysis:
+ description: Gather and analyze data on the causes and risk factors of lung
+ diseases.
+ expected_output: Report detailing key findings on lung disease causes.
+ tools:
+ - 'YouTubeSearchTool'
+```
+
+## Integrate Langchain with Wrappers
+
+```bash
+pip install wikipedia langchain_community
+```
+
+```python
+# tools.py
+from langchain_community.utilities import WikipediaAPIWrapper
+from praisonai_tools import BaseTool
+
+class WikipediaSearchTool(BaseTool):
+ name: str = "WikipediaSearchTool"
+ description: str = "Search Wikipedia for relevant information based on a query."
+
+ def _run(self, query: str):
+ api_wrapper = WikipediaAPIWrapper(top_k_results=4, doc_content_chars_max=100)
+ results = api_wrapper.load(query=query)
+ return results
+```
+
+```yaml
+# agents.yaml
+framework: crewai
+topic: research about nvidia growth
+roles:
+ data_collector:
+ backstory: An experienced researcher with the ability to efficiently collect and
+ organize vast amounts of data.
+    goal: Gather information on Nvidia's growth using the WikipediaSearchTool
+ role: Data Collector
+ tasks:
+ data_collection_task:
+ description: Collect data on Nvidia's growth from various sources such as
+ financial reports, news articles, and company announcements.
+ expected_output: A comprehensive document detailing data points on Nvidia's
+ growth over the years.
+ tools:
+ - 'WikipediaSearchTool'
+```
\ No newline at end of file
diff --git a/docs/reddit.md b/docs/reddit.md
new file mode 100644
index 00000000..d452b51c
--- /dev/null
+++ b/docs/reddit.md
@@ -0,0 +1,32 @@
+# Reddit PraisonAI Integration
+
+```bash
+export REDDIT_USER_AGENT=[USER]
+export REDDIT_CLIENT_SECRET=xxxxxx
+export REDDIT_CLIENT_ID=xxxxxx
+```
+
+tools.py
+
+```python
+from langchain_community.tools.reddit_search.tool import RedditSearchRun
+```
+
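+`RedditSearchRun` authenticates through `RedditSearchAPIWrapper`, which reads the three environment variables above. A standalone check might look like this (a sketch; assumes the `praw` package is installed):
+
+```python
+from langchain_community.tools.reddit_search.tool import RedditSearchRun
+from langchain_community.utilities.reddit_search import RedditSearchAPIWrapper
+
+tool = RedditSearchRun(api_wrapper=RedditSearchAPIWrapper())
+# the tool accepts a dict matching its input schema
+print(tool.run({"query": "lung disease", "sort": "relevance", "time_filter": "all", "subreddit": "health", "limit": "5"}))
+```
+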
+agents.yaml
+
+```yaml
+framework: crewai
+topic: research about the causes of lung disease
+roles:
+ research_analyst:
+ backstory: Experienced in analyzing scientific data related to respiratory health.
+ goal: Analyze data on lung diseases
+ role: Research Analyst
+ tasks:
+ data_analysis:
+ description: Gather and analyze data on the causes and risk factors of lung
+ diseases.
+ expected_output: Report detailing key findings on lung disease causes.
+ tools:
+ - 'RedditSearchRun'
+```
\ No newline at end of file
diff --git a/docs/tavily.md b/docs/tavily.md
new file mode 100644
index 00000000..4eba47c9
--- /dev/null
+++ b/docs/tavily.md
@@ -0,0 +1,15 @@
+# Tavily PraisonAI Integration
+
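+The LangChain wrapper reads your Tavily API key from the `TAVILY_API_KEY` environment variable, so export it first:
+
+```bash
+export TAVILY_API_KEY=xxxxxxxxxxxx
+```
+
+Then define the tool in `tools.py`:
+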
+```python
+from praisonai_tools import BaseTool
+from langchain.utilities.tavily_search import TavilySearchAPIWrapper
+
+class TavilyTool(BaseTool):
+ name: str = "TavilyTool"
+ description: str = "Search Tavily for relevant information based on a query."
+
+ def _run(self, query: str):
+ api_wrapper = TavilySearchAPIWrapper()
+ results = api_wrapper.results(query=query, max_results=5)
+ return results
+```
\ No newline at end of file
diff --git a/docs/tools.md b/docs/tools.md
new file mode 100644
index 00000000..21c80f3c
--- /dev/null
+++ b/docs/tools.md
@@ -0,0 +1,43 @@
+# Tools
+
+## Inbuilt Tools
+
+- CodeDocsSearchTool
+- CSVSearchTool
+- DirectorySearchTool
+- DirectoryReadTool
+- DOCXSearchTool
+- FileReadTool
+- GithubSearchTool
+- SerperDevTool
+- TXTSearchTool
+- JSONSearchTool
+- MDXSearchTool
+- PDFSearchTool
+- PGSearchTool
+- RagTool
+- ScrapeElementFromWebsiteTool
+- ScrapeWebsiteTool
+- SeleniumScrapingTool
+- WebsiteSearchTool
+- XMLSearchTool
+- YoutubeChannelSearchTool
+- YoutubeVideoSearchTool
+
+## Example Usage
+
+```yaml
+framework: crewai
+topic: research about the causes of lung disease
+roles:
+ research_analyst:
+ backstory: Experienced in analyzing scientific data related to respiratory health.
+ goal: Analyze data on lung diseases
+ role: Research Analyst
+ tasks:
+ data_analysis:
+ description: Gather and analyze data on the causes and risk factors of lung diseases.
+ expected_output: Report detailing key findings on lung disease causes.
+ tools:
+ - WebsiteSearchTool
+```
\ No newline at end of file
diff --git a/docs/wikipedia.md b/docs/wikipedia.md
new file mode 100644
index 00000000..222da443
--- /dev/null
+++ b/docs/wikipedia.md
@@ -0,0 +1,38 @@
+# Wikipedia PraisonAI Integration
+
+```bash
+pip install wikipedia langchain_community
+```
+
+```python
+# tools.py
+from langchain_community.utilities import WikipediaAPIWrapper
+from praisonai_tools import BaseTool
+
+class WikipediaSearchTool(BaseTool):
+ name: str = "WikipediaSearchTool"
+ description: str = "Search Wikipedia for relevant information based on a query."
+
+ def _run(self, query: str):
+ api_wrapper = WikipediaAPIWrapper(top_k_results=4, doc_content_chars_max=100)
+ results = api_wrapper.load(query=query)
+ return results
+```
+
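+To preview what the wrapper returns before handing it to an agent, you can call it directly (a minimal sketch):
+
+```python
+from langchain_community.utilities import WikipediaAPIWrapper
+
+api_wrapper = WikipediaAPIWrapper(top_k_results=1, doc_content_chars_max=200)
+print(api_wrapper.run("Nvidia"))
+```
+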
+```yaml
+# agents.yaml
+framework: crewai
+topic: research about nvidia growth
+roles:
+ data_collector:
+ backstory: An experienced researcher with the ability to efficiently collect and
+ organize vast amounts of data.
+    goal: Gather information on Nvidia's growth using the WikipediaSearchTool
+ role: Data Collector
+ tasks:
+ data_collection_task:
+ description: Collect data on Nvidia's growth from various sources such as
+ financial reports, news articles, and company announcements.
+ expected_output: A comprehensive document detailing data points on Nvidia's
+ growth over the years.
+ tools:
+ - 'WikipediaSearchTool'
+```
\ No newline at end of file
diff --git a/docs/you.com.md b/docs/you.com.md
new file mode 100644
index 00000000..916aca45
--- /dev/null
+++ b/docs/you.com.md
@@ -0,0 +1,36 @@
+# You.com PraisonAI Integration
+
+```bash
+export YDC_API_KEY=xxxxxxxxxxxx
+```
+tools.py
+```python
+from langchain_community.utilities.you import YouSearchAPIWrapper
+from praisonai_tools import BaseTool
+
+class YouSearchTool(BaseTool):
+ name: str = "You Search Tool"
+ description: str = "Search You.com for relevant information based on a query."
+
+ def _run(self, query: str):
+ api_wrapper = YouSearchAPIWrapper()
+ results = api_wrapper.results(query=query, max_results=5)
+ return results
+```
+
+agents.yaml
+
+```yaml
+framework: crewai
+topic: research about the causes of lung disease
+roles:
+ research_analyst:
+ backstory: Experienced in analyzing scientific data related to respiratory health.
+ goal: Analyze data on lung diseases
+ role: Research Analyst
+ tasks:
+ data_analysis:
+ description: Gather and analyze data on the causes and risk factors of lung
+ diseases.
+ expected_output: Report detailing key findings on lung disease causes.
+ tools:
+ - 'YouSearchTool'
+```
\ No newline at end of file
diff --git a/docs/youtube.md b/docs/youtube.md
new file mode 100644
index 00000000..55212667
--- /dev/null
+++ b/docs/youtube.md
@@ -0,0 +1,28 @@
+# YouTube Search PraisonAI Integration
+
+```bash
+pip install youtube_search praisonai langchain_community langchain
+```
+
+```python
+# tools.py
+from langchain_community.tools import YouTubeSearchTool
+```
+
+```yaml
+# agents.yaml
+framework: crewai
+topic: research about the causes of lung disease
+roles:
+ research_analyst:
+ backstory: Experienced in analyzing scientific data related to respiratory health.
+ goal: Analyze data on lung diseases
+ role: Research Analyst
+ tasks:
+ data_analysis:
+ description: Gather and analyze data on the causes and risk factors of lung
+ diseases.
+ expected_output: Report detailing key findings on lung disease causes.
+ tools:
+ - 'YouTubeSearchTool'
+```
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
index 90a03e74..79b104e3 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -8,20 +8,38 @@ repo_name: MervinPraison/PraisonAI
repo_url: https://github.com/MervinPraison/PraisonAI
nav:
- - Home: index.md
+- 🏠 Home:
+  - Home: 'index.md'
- TL;DR: tldr.md
+ - User Interface: ui.md
+ - Other Models: other_models.md
+
+- ⚡ Getting Started:
- Installation: installation.md
- Initialise: initialise.md
- Run: run.md
- - User Interface: ui.md
- - Create Custom Tools: create_custom_tools.md
+
+- ⚙️ Tools:
+ - Tools: tools.md
+ - Create Custom Tools: custom_tools.md
+ - Langchain: langchain.md
+ - Firecrawl: firecrawl.md
+ - Wikipedia: wikipedia.md
+ - YouTube: youtube.md
+ - Tavily: tavily.md
+ - Reddit: reddit.md
+ - You.com: you.com.md
+
+- 📚 Developers:
- Test: test.md
- Agents Playbook: agents_playbook.md
- Wrapper: wrapper.md
- Deploy: deploy.md
- - Other Models: other_models.md
- - Contributing: contributing.md
- - API Reference: api.md
+
+- 🤗 Contributing:
+ - Home: contributing.md
+
+- 🛠️ API Reference:
+ - API: api.md
- API Code: api/praisonai/index.html
theme:
diff --git a/poetry.lock b/poetry.lock
index d0289008..32338788 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -356,7 +356,7 @@ files = [
name = "blinker"
version = "1.8.2"
description = "Fast, simple object-to-object and broadcast signaling"
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"},
@@ -365,17 +365,17 @@ files = [
[[package]]
name = "boto3"
-version = "1.34.127"
+version = "1.34.129"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "boto3-1.34.127-py3-none-any.whl", hash = "sha256:d370befe4fb7aea5bc383057d7dad18dda5d0cf3cd3295915bcc8c8c4191905c"},
- {file = "boto3-1.34.127.tar.gz", hash = "sha256:58ccdeae3a96811ecc9d5d866d8226faadbd0ee1891756e4a04d5186e9a57a64"},
+ {file = "boto3-1.34.129-py3-none-any.whl", hash = "sha256:cc73de1c9d953b1f9da6ee2404af717e93d888f790f3e0291b22d1b8489eb401"},
+ {file = "boto3-1.34.129.tar.gz", hash = "sha256:a7a696fd3e7f5f43a81450b441f3eb6c5a89d28efe867cd97d8fc73ea5d8c139"},
]
[package.dependencies]
-botocore = ">=1.34.127,<1.35.0"
+botocore = ">=1.34.129,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -384,13 +384,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.34.127"
+version = "1.34.129"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.34.127-py3-none-any.whl", hash = "sha256:e14fa28c8bb141de965e700f88b196d17c67a703c7f0f5c7e14f7dd1cf636011"},
- {file = "botocore-1.34.127.tar.gz", hash = "sha256:a377871742c40603d559103f19acb7bc93cfaf285e68f21b81637ec396099877"},
+ {file = "botocore-1.34.129-py3-none-any.whl", hash = "sha256:86d3dd30996aa459e9c3321edac12aebe47c73cb4acc7556941f9b4c39726088"},
+ {file = "botocore-1.34.129.tar.gz", hash = "sha256:7c56e25af6112d69c5d14a15b42f76ba7687687abc463a96ac5edca19c0a9c2d"},
]
[package.dependencies]
@@ -656,13 +656,13 @@ files = [
[[package]]
name = "chainlit"
-version = "1.1.301"
+version = "1.1.302"
description = "Build Conversational AI."
optional = true
python-versions = "<4.0.0,>=3.8.1"
files = [
- {file = "chainlit-1.1.301-py3-none-any.whl", hash = "sha256:d404974c42caf4216a290b82aab4dddfb5c535164c5974e0a207352196fd7cde"},
- {file = "chainlit-1.1.301.tar.gz", hash = "sha256:c577d405ab00cffb4b4d32976c13cf16095fb89e0bb9e8059c2d3d070b1a20d9"},
+ {file = "chainlit-1.1.302-py3-none-any.whl", hash = "sha256:20fbf5d7e70933552d4a4ebdd2ceefbee179138f798f4dc9160a249842f7d5fd"},
+ {file = "chainlit-1.1.302.tar.gz", hash = "sha256:513e4c126a1db31ea717adf5a7a89a46a42b82eddcdf1d4b6e4e33a9963435ce"},
]
[package.dependencies]
@@ -894,13 +894,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "cohere"
-version = "5.5.3"
+version = "5.5.8"
description = ""
optional = false
python-versions = "<4.0,>=3.8"
files = [
- {file = "cohere-5.5.3-py3-none-any.whl", hash = "sha256:99d20129713a6dae052368b4839773a214592a76bee345b94a4846d00f702da3"},
- {file = "cohere-5.5.3.tar.gz", hash = "sha256:8c7ebe2f5bf83fee8e55a24a0acdd4b0e94de274fd0ef32b285978289a03e930"},
+ {file = "cohere-5.5.8-py3-none-any.whl", hash = "sha256:e1ed84b90eadd13c6a68ee28e378a0bb955f8945eadc6eb7ee126b3399cafd54"},
+ {file = "cohere-5.5.8.tar.gz", hash = "sha256:84ce7666ff8fbdf4f41fb5f6ca452ab2639a514bc88967a2854a9b1b820d6ea0"},
]
[package.dependencies]
@@ -908,9 +908,10 @@ boto3 = ">=1.34.0,<2.0.0"
fastavro = ">=1.9.4,<2.0.0"
httpx = ">=0.21.2"
httpx-sse = ">=0.4.0,<0.5.0"
+parameterized = ">=0.9.0,<0.10.0"
pydantic = ">=1.9.2"
requests = ">=2.0.0,<3.0.0"
-tokenizers = ">=0.19,<0.20"
+tokenizers = ">=0.15,<1"
types-requests = ">=2.0.0,<3.0.0"
typing_extensions = ">=4.0.0"
@@ -1482,7 +1483,7 @@ vw = ["scikit-learn", "vowpalwabbit (>=8.10.0,<9.0.0)"]
name = "flask"
version = "3.0.3"
description = "A simple framework for building complex web applications."
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"},
@@ -1789,13 +1790,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
[[package]]
name = "google-cloud-aiplatform"
-version = "1.55.0"
+version = "1.56.0"
description = "Vertex AI API client library"
optional = false
python-versions = ">=3.8"
files = [
- {file = "google-cloud-aiplatform-1.55.0.tar.gz", hash = "sha256:aa87cb6c49ae5fde87fb831ce8ad4a853c4656fe04babe505e9144c7a9e09c1a"},
- {file = "google_cloud_aiplatform-1.55.0-py2.py3-none-any.whl", hash = "sha256:c6cc76ca5537f4636a0c3f8c0288d2e0d2d86ef708e562d2654313e11d6ee46a"},
+ {file = "google-cloud-aiplatform-1.56.0.tar.gz", hash = "sha256:d4cfb085427dac01142915f523949ac2955d6c7f148d95017d3286a77caf5d5e"},
+ {file = "google_cloud_aiplatform-1.56.0-py2.py3-none-any.whl", hash = "sha256:ee1ab3bd115c3caebf8ddfd3e47eeb8396a3ec2fc5f5baf1a5c295c8d64333ab"},
]
[package.dependencies]
@@ -1817,8 +1818,8 @@ cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow
datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
endpoint = ["requests (>=2.28.1)"]
full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
-langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)"]
-langchain-testing = ["absl-py", "cloudpickle (>=2.2.1,<4.0)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist"]
+langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "tenacity (<=8.3)"]
+langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"]
lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"]
pipelines = ["pyyaml (>=5.3.1,<7)"]
@@ -1828,7 +1829,7 @@ private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"]
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"]
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
-reasoningengine = ["cloudpickle (>=2.2.1,<4.0)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"]
+reasoningengine = ["cloudpickle (>=3.0,<4.0)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"]
tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
vizier = ["google-vizier (>=0.1.6)"]
@@ -2534,7 +2535,7 @@ typer = ">=0.9.0,<0.10.0"
name = "itsdangerous"
version = "2.2.0"
description = "Safely pass data to untrusted environments and back."
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
@@ -2545,7 +2546,7 @@ files = [
name = "jinja2"
version = "3.1.4"
description = "A very fast and expressive template engine."
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
@@ -2939,13 +2940,13 @@ extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
[[package]]
name = "langsmith"
-version = "0.1.77"
+version = "0.1.80"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langsmith-0.1.77-py3-none-any.whl", hash = "sha256:2202cc21b1ed7e7b9e5d2af2694be28898afa048c09fdf09f620cbd9301755ae"},
- {file = "langsmith-0.1.77.tar.gz", hash = "sha256:4ace09077a9a4e412afeb4b517ca68e7de7b07f36e4792dc8236ac5207c0c0c7"},
+ {file = "langsmith-0.1.80-py3-none-any.whl", hash = "sha256:951fc29576b52afd8378d41f6db343090fea863e3620f0ca97e83b221f93c94d"},
+ {file = "langsmith-0.1.80.tar.gz", hash = "sha256:a29b1dde27612308beee424f1388ad844c8e7e375bf2ac8bdf4da174013f279d"},
]
[package.dependencies]
@@ -3939,6 +3940,20 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.9.2)"]
+[[package]]
+name = "parameterized"
+version = "0.9.0"
+description = "Parameterized testing with any Python test framework"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"},
+ {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"},
+]
+
+[package.extras]
+dev = ["jinja2"]
+
[[package]]
name = "pillow"
version = "10.3.0"
@@ -4677,13 +4692,13 @@ files = [
[[package]]
name = "pyright"
-version = "1.1.367"
+version = "1.1.368"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pyright-1.1.367-py3-none-any.whl", hash = "sha256:89de6502ae02f1552d0c4df4b46867887a419849f379db617695ef9308cf01eb"},
- {file = "pyright-1.1.367.tar.gz", hash = "sha256:b1e5522ceb246ee6bc293a43d6d0162719d6467c1f1e9b81cee741aa11cdacbd"},
+ {file = "pyright-1.1.368-py3-none-any.whl", hash = "sha256:4a86e34b61c755b43b367af7fbf927fc6466fff6b81a9dcea07d42416c640af3"},
+ {file = "pyright-1.1.368.tar.gz", hash = "sha256:9b2aa48142d9d9fc9a6aedff743c76873cc4e615f3297cdbf893d5793f75b306"},
]
[package.dependencies]
@@ -5503,64 +5518,64 @@ files = [
[[package]]
name = "sqlalchemy"
-version = "2.0.30"
+version = "2.0.31"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
files = [
- {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"},
- {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"},
- {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"},
-]
-
-[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"},
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"},
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"},
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"},
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"},
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"},
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"},
+ {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"},
+ {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"},
+ {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"},
+ {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"},
+ {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"},
+ {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"},
+ {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"},
+ {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"},
+ {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"},
+ {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"},
+ {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"},
+ {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"},
+ {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"},
+ {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"},
+]
+
+[package.dependencies]
+greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
typing-extensions = ">=4.6.0"
[package.extras]
@@ -6251,7 +6266,7 @@ files = [
name = "werkzeug"
version = "3.0.3"
description = "The comprehensive WSGI web application library."
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"},
@@ -6511,10 +6526,11 @@ doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linke
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[extras]
+api = ["flask"]
gradio = ["gradio"]
ui = ["chainlit"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "2ba6072efc0fe2fb868bf1b7a5c3947f488cf1637e1a585acfae961561f6bde0"
+content-hash = "ed013174f31333cb688d1c84764578d8a0df409e1f70ddb544fdaa44a29e7cc1"
diff --git a/praisonai/deploy.py b/praisonai/deploy.py
index e1d6458c..ec009f86 100644
--- a/praisonai/deploy.py
+++ b/praisonai/deploy.py
@@ -56,7 +56,7 @@ def create_dockerfile(self):
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==0.0.30 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==0.0.31 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
diff --git a/praisonai/inbuilt_tools/autogen_tools.py b/praisonai/inbuilt_tools/autogen_tools.py
index 5b366927..1c2505cb 100644
--- a/praisonai/inbuilt_tools/autogen_tools.py
+++ b/praisonai/inbuilt_tools/autogen_tools.py
@@ -166,7 +166,11 @@ def autogen_ScrapeWebsiteTool(assistant, user_proxy):
def register_scrape_website_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
def tool_func(website_url: str) -> Any:
tool_instance = tool_class(website_url=website_url)
- return tool_instance.run()
+ content = tool_instance.run()
+ # Ensure content is properly decoded as UTF-8 if it's a bytes object
+ if isinstance(content, bytes):
+ content = content.decode('utf-8')
+ return content
register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
register_scrape_website_tool(ScrapeWebsiteTool, "scrape_website_tool", "Read website content(website_url: 'string') - A tool that can be used to read content from a specified website.", assistant, user_proxy)
diff --git a/pyproject.toml b/pyproject.toml
index f0d2127c..ddc4bebd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "PraisonAI"
-version = "0.0.30"
+version = "0.0.31"
description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
authors = ["Mervin Praison"]
license = ""
@@ -21,10 +21,9 @@ crewai = ">=0.30.4"
markdown = ">=3.5"
crewai-tools = "^0.2.6"
praisonai-tools = ">=0.0.4"
-blinker = ">=1.8.2"
-Flask = ">=3.0.0"
chainlit = {version = "^1.1.301", optional = true}
gradio = {version = ">=4.26.0", optional = true}
+flask = {version = ">=3.0.0", optional = true}
[tool.poetry.dev-dependencies]
pytest = "^8.0.0"
@@ -39,4 +38,5 @@ praisonai = "praisonai.__main__:main"
[tool.poetry.extras]
ui = ["chainlit"]
-gradio = ["gradio"]
\ No newline at end of file
+gradio = ["gradio"]
+api = ["flask"]
\ No newline at end of file