Commit
Merge pull request #112 from MervinPraison/develop
.praisoninclude features release. Include only required files
MervinPraison authored Jul 20, 2024
2 parents e3d4950 + d22a810 commit 1af1f8e
Showing 14 changed files with 141 additions and 37 deletions.
4 changes: 2 additions & 2 deletions .praisonignore
@@ -1,3 +1,3 @@
cookbooks
docs
tests
tests
docs
3 changes: 1 addition & 2 deletions .praisoninclude
@@ -1,2 +1 @@
praisonai/ui/*
.praison*
#praisonai
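
For reference, this commit teaches ContextGatherer (praisonai/ui/context.py, further down in this diff) to consume these entries. A minimal sketch of that parsing, assuming it runs from the repository root and that blank lines and lines starting with # are skipped as in the new get_include_paths():

    # Sketch only: mirrors the parsing added in ContextGatherer.get_include_paths()
    with open(".praisoninclude", "r") as f:
        include_paths = [
            line.strip() for line in f
            if line.strip() and not line.startswith("#")
        ]
    print(include_paths)  # e.g. ['praisonai/ui/*'] for the file shown above
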
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,6 +1,6 @@
FROM python:3.11-slim
WORKDIR /app
COPY . .
RUN pip install flask praisonai==0.0.54 gunicorn markdown
RUN pip install flask praisonai==0.0.55 gunicorn markdown
EXPOSE 8080
CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]
2 changes: 1 addition & 1 deletion docs/api/praisonai/deploy.html
@@ -110,7 +110,7 @@ <h2 id="raises">Raises</h2>
file.write(&#34;FROM python:3.11-slim\n&#34;)
file.write(&#34;WORKDIR /app\n&#34;)
file.write(&#34;COPY . .\n&#34;)
file.write(&#34;RUN pip install flask praisonai==0.0.54 gunicorn markdown\n&#34;)
file.write(&#34;RUN pip install flask praisonai==0.0.55 gunicorn markdown\n&#34;)
file.write(&#34;EXPOSE 8080\n&#34;)
file.write(&#39;CMD [&#34;gunicorn&#34;, &#34;-b&#34;, &#34;0.0.0.0:8080&#34;, &#34;api:app&#34;]\n&#39;)

4 changes: 2 additions & 2 deletions docs/home.md
@@ -3,8 +3,8 @@
<p align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="images/praisonai-logo-large.png">
<source media="(prefers-color-scheme: light)" srcset="images/praisonai-logo-black-large.png">
<img alt="PraisonAI Logo" src="images/praisonai-logo-black-large.png">
<source media="(prefers-color-scheme: light)" srcset="images/praisonai-logo-large.png">
<img alt="PraisonAI Logo" src="images/praisonai-logo-large.png">
</picture>
</p>

10 changes: 4 additions & 6 deletions docs/index.md
@@ -2,9 +2,9 @@

<p align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="images/praisonai-logo-large.png">
<source media="(prefers-color-scheme: light)" srcset="images/praisonai-logo-black-large.png">
<img alt="PraisonAI Logo" src="images/praisonai-logo-black-large.png">
<source media="(prefers-color-scheme: dark)" srcset="overrides/images/praisonai-logo-dark.png">
<source media="(prefers-color-scheme: light)" srcset="overrides/images/praisonai-logo-light.png">
<img alt="PraisonAI Logo" src="overrides/images/praisonai-logo-light.png">
</picture>
</p>

@@ -16,8 +16,6 @@

<div align="center">

# Praison AI

</div>

Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, represents a low-code, centralised framework designed to simplify the creation and orchestration of multi-agent systems for various LLM applications, emphasizing ease of use, customization, and human-agent interaction.
@@ -26,7 +24,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
<picture>
<source media="(prefers-color-scheme: dark)" srcset="images/architecture-dark.png">
<source media="(prefers-color-scheme: light)" srcset="images/architecture-light.png">
<img alt="PraisonAI Architecture" src="images/architecture-light.png">
<img alt="PraisonAI Architecture" src="images/architecture-dark.png">
</picture>
</div>

11 changes: 11 additions & 0 deletions docs/overrides/css/custom.css
@@ -0,0 +1,11 @@
@media (prefers-color-scheme: dark) {
.md-header__logo img {
content: url('../images/praisonai-logo-dark.png');
}
}

@media (prefers-color-scheme: light) {
.md-header__logo img {
content: url('../images/praisonai-logo-light.png');
}
}
Binary file added docs/overrides/images/praisonai-logo-dark.png
Binary file added docs/overrides/images/praisonai-logo-light.png
20 changes: 20 additions & 0 deletions docs/overrides/js/custom.js
@@ -0,0 +1,20 @@
const theme = localStorage.getItem('theme') || (window.matchMedia("(prefers-color-scheme: dark)").matches ? "dark" : "light");
document.documentElement.setAttribute("data-theme", theme);

document.addEventListener('DOMContentLoaded', (event) => {
const toggleSwitch = document.querySelector('.theme-switch input[type="checkbox"]');
if (theme === 'dark') {
toggleSwitch.checked = true;
}
toggleSwitch.addEventListener('change', switchTheme, false);
});

function switchTheme(e) {
if (e.target.checked) {
document.documentElement.setAttribute('data-theme', 'dark');
localStorage.setItem('theme', 'dark');
} else {
document.documentElement.setAttribute('data-theme', 'light');
localStorage.setItem('theme', 'light');
}
}
32 changes: 31 additions & 1 deletion mkdocs.yml
@@ -7,7 +7,7 @@ repo_url: https://github.com/MervinPraison/PraisonAI

nav:
- 🏠 Home:
- Home: home.md
- Home: index.md
- TL;DR: tldr.md

- ⚡ Getting Started:
@@ -69,6 +69,27 @@ nav:

theme:
name: material
features:
- navigation.tabs
- navigation.tabs.sticky
- navigation.sections
- navigation.expand
- navigation.indexes
- navigation.top
- header.autohide
- content.code.annotate
- content.tooltips
palette:
# Palette toggle for light mode
- scheme: default
toggle:
icon: material/brightness-7
name: Switch to light mode
# Palette toggle for dark mode
- scheme: slate
toggle:
icon: material/brightness-4
name: Switch to dark mode

markdown_extensions:
- admonition
@@ -87,3 +108,12 @@ plugins:
selection:
docstring_style: google
# - glightbox

extra_css:
- overrides/css/custom.css

extra_javascript:
- overrides/js/custom.js

extra:
custom_dir: docs/overrides
2 changes: 1 addition & 1 deletion praisonai/deploy.py
@@ -56,7 +56,7 @@ def create_dockerfile(self):
file.write("FROM python:3.11-slim\n")
file.write("WORKDIR /app\n")
file.write("COPY . .\n")
file.write("RUN pip install flask praisonai==0.0.54 gunicorn markdown\n")
file.write("RUN pip install flask praisonai==0.0.55 gunicorn markdown\n")
file.write("EXPOSE 8080\n")
file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')

86 changes: 66 additions & 20 deletions praisonai/ui/context.py
@@ -1,6 +1,5 @@
import os
import fnmatch
import re
import yaml
from pathlib import Path
import logging
@@ -34,10 +33,12 @@ def __init__(self, directory='.', output_file='context.txt',
self.max_file_size = max_file_size
self.max_tokens = int(os.getenv("PRAISONAI_MAX_TOKENS", max_tokens))
self.ignore_patterns = self.get_ignore_patterns()
self.include_paths = self.get_include_paths()
self.included_files = []

def get_ignore_patterns(self):
"""
Loads ignore patterns from various sources, prioritizing them in
Loads ignore patterns from various sources, prioritizing them in
the following order:
1. .praisonignore
2. settings.yaml (under code.ignore_files)
@@ -95,6 +96,19 @@ def load_from_file(filepath):
logger.debug(f"Final ignore patterns: {modified_ignore_patterns}")
return modified_ignore_patterns

def get_include_paths(self):
include_paths = []

# 1. Load from .praisoninclude
include_file = os.path.join(self.directory, '.praisoninclude')
if os.path.exists(include_file):
with open(include_file, 'r') as f:
include_paths.extend(
line.strip() for line in f
if line.strip() and not line.startswith('#')
)
return include_paths

def should_ignore(self, file_path):
"""
Check if a file or directory should be ignored based on patterns.
@@ -116,31 +130,65 @@ def is_relevant_file(self, file_path):
any(file_path.endswith(ext) for ext in self.relevant_extensions)

def gather_context(self):
"""Gather context from relevant files, respecting ignore patterns."""
"""Gather context from relevant files, respecting ignore patterns and include paths."""
context = []
total_files = 0
processed_files = 0

for root, dirs, files in os.walk(self.directory):
total_files += len(files)
dirs[:] = [d for d in dirs if not self.should_ignore(os.path.join(root, d))]
for file in files:
file_path = os.path.join(root, file)
if not self.should_ignore(file_path) and self.is_relevant_file(file_path):
if not self.include_paths:
# No include paths specified, process the entire directory
for root, dirs, files in os.walk(self.directory):
total_files += len(files)
dirs[:] = [d for d in dirs if not self.should_ignore(os.path.join(root, d))]
for file in files:
file_path = os.path.join(root, file)
if not self.should_ignore(file_path) and self.is_relevant_file(file_path):
try:
with open(file_path, 'r', encoding='utf-8') as f:
content = f.read()
context.append(f"File: {file_path}\n\n{content}\n\n{'='*50}\n")
self.included_files.append(Path(file_path).relative_to(self.directory))
except Exception as e:
logger.error(f"Error reading {file_path}: {e}")
processed_files += 1
print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
else:
# Process specified include paths
for include_path in self.include_paths:
full_path = os.path.join(self.directory, include_path)
if os.path.isdir(full_path):
for root, dirs, files in os.walk(full_path):
total_files += len(files)
dirs[:] = [d for d in dirs if not self.should_ignore(os.path.join(root, d))]
for file in files:
file_path = os.path.join(root, file)
if not self.should_ignore(file_path) and self.is_relevant_file(file_path):
try:
with open(file_path, 'r', encoding='utf-8') as f:
content = f.read()
context.append(f"File: {file_path}\n\n{content}\n\n{'='*50}\n")
self.included_files.append(Path(file_path).relative_to(self.directory))
except Exception as e:
logger.error(f"Error reading {file_path}: {e}")
processed_files += 1
print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
elif os.path.isfile(full_path) and self.is_relevant_file(full_path):
try:
with open(file_path, 'r', encoding='utf-8') as f:
with open(full_path, 'r', encoding='utf-8') as f:
content = f.read()
context.append(f"File: {file_path}\n\n{content}\n\n{'='*50}\n")
context.append(f"File: {full_path}\n\n{content}\n\n{'='*50}\n")
self.included_files.append(Path(full_path).relative_to(self.directory))
except Exception as e:
logger.error(f"Error reading {file_path}: {e}")
processed_files += 1
print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
logger.error(f"Error reading {full_path}: {e}")
processed_files += 1
print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)

print() # New line after progress indicator
return '\n'.join(context)

def count_tokens(self, text):
"""Count tokens using a simple whitespace-based tokenizer."""
return len(text.split())
return len(text.split())

def truncate_context(self, context):
"""Truncate context to stay within the token limit."""
@@ -165,12 +213,9 @@ def add_to_tree(path, prefix=''):
contents = sorted(path.iterdir())
pointers = [('└── ' if i == len(contents) - 1 else '├── ') for i in range(len(contents))]
for pointer, item in zip(pointers, contents):
# Use should_ignore for consistency
if self.should_ignore(item):
continue

rel_path = item.relative_to(start_dir)
tree.append(f"{prefix}{pointer}{rel_path}")
if rel_path in self.included_files:
tree.append(f"{prefix}{pointer}{rel_path}")

if item.is_dir():
add_to_tree(item, prefix + (' ' if pointer == '└── ' else '│ '))
@@ -193,6 +238,7 @@ def run(self):
def main():
gatherer = ContextGatherer()
context, token_count, context_tree = gatherer.run()
print(context_tree)
print(f"\nThe context contains approximately {token_count} tokens.")
print("First 500 characters of context:")
print(context[:500] + "...")
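A minimal usage sketch of the updated gatherer, assuming it is run from the directory containing .praisonignore and the new .praisoninclude; the import path below is an assumption based on this commit's layout (praisonai/ui/context.py):

    from praisonai.ui.context import ContextGatherer  # assumed import path for this sketch

    gatherer = ContextGatherer()  # reads .praisonignore and, if present, .praisoninclude
    context, token_count, context_tree = gatherer.run()
    print(context_tree)           # only files that were actually gathered appear in the tree
    print(f"Gathered roughly {token_count} tokens of context")
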
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "PraisonAI"
version = "0.0.54"
version = "0.0.55"
description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
authors = ["Mervin Praison"]
license = ""
