From d21ed02a3bacffe3724a26c695f15a7bba2bb1af Mon Sep 17 00:00:00 2001 From: prabinoid <38830224+prabinoid@users.noreply.github.com> Date: Fri, 7 Feb 2025 14:41:53 +0545 Subject: [PATCH] feat: aiocache and caching implementation --- backend/api/comments/resources.py | 2 - backend/api/projects/resources.py | 3 +- backend/api/tasks/statistics.py | 1 + backend/cron.py | 154 ------------------ backend/models/dtos/project_dto.py | 2 - backend/models/dtos/validator_dto.py | 1 + backend/models/postgis/project.py | 15 -- backend/models/postgis/user.py | 1 - backend/services/mapping_service.py | 2 +- backend/services/mapswipe_service.py | 10 +- backend/services/messaging/message_service.py | 4 +- backend/services/project_search_service.py | 17 +- backend/services/project_service.py | 21 ++- backend/services/recommendation_service.py | 10 +- backend/services/stats_service.py | 14 +- backend/services/users/user_service.py | 6 +- pdm.lock | 80 ++------- pyproject.toml | 1 + 18 files changed, 75 insertions(+), 269 deletions(-) delete mode 100644 backend/cron.py diff --git a/backend/api/comments/resources.py b/backend/api/comments/resources.py index 7cb8b38435..79c4036ae5 100644 --- a/backend/api/comments/resources.py +++ b/backend/api/comments/resources.py @@ -195,8 +195,6 @@ async def delete( @router.post("/{project_id}/comments/tasks/{task_id}/") -# TODO Decorator -# @tm.pm_only(False) async def post( request: Request, project_id: int, diff --git a/backend/api/projects/resources.py b/backend/api/projects/resources.py index b648a419d1..c6f9528831 100644 --- a/backend/api/projects/resources.py +++ b/backend/api/projects/resources.py @@ -746,7 +746,6 @@ async def get( if user_id: user = await UserService.get_user_by_id(user_id, db) search_dto = setup_search_dto(request) - if search_dto.omit_map_results and search_dto.download_as_csv: return JSONResponse( content={ @@ -793,6 +792,8 @@ async def get( return JSONResponse(content={"Error": error_msg}, status_code=401) if search_dto.download_as_csv: + if user: + user = user.id all_results_csv = await ProjectSearchService.search_projects_as_csv( search_dto, user, db, True ) diff --git a/backend/api/tasks/statistics.py b/backend/api/tasks/statistics.py index c00459accc..08db6d416a 100644 --- a/backend/api/tasks/statistics.py +++ b/backend/api/tasks/statistics.py @@ -10,6 +10,7 @@ from backend.services.stats_service import StatsService from backend.services.users.authentication_service import login_required + router = APIRouter( prefix="/tasks", tags=["tasks"], diff --git a/backend/cron.py b/backend/cron.py deleted file mode 100644 index a087178c65..0000000000 --- a/backend/cron.py +++ /dev/null @@ -1,154 +0,0 @@ -import datetime - -from apscheduler.schedulers.asyncio import AsyncIOScheduler -from apscheduler.triggers.cron import CronTrigger -from apscheduler.triggers.interval import IntervalTrigger -from loguru import logger - -from backend.db import db_connection -from backend.models.postgis.task import Task - - -async def auto_unlock_tasks(): - async with db_connection.database.connection() as conn: - # Identify distinct project IDs that were touched in the last 2 hours - two_hours_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=120) - projects_query = """ - SELECT DISTINCT project_id - FROM task_history - WHERE action_date > :two_hours_ago - """ - projects = await conn.fetch_all( - query=projects_query, values={"two_hours_ago": two_hours_ago} - ) - for project in projects: - project_id = project["project_id"] - logger.info(f"Processing 
project_id: {project_id}") - await Task.auto_unlock_tasks(project_id, conn) - - -async def update_all_project_stats(): - """ - Async function to update project statistics in the database. - """ - async with db_connection.database.connection() as conn: - logger.info("Started updating project stats.") - await conn.execute("UPDATE users SET projects_mapped = NULL;") - projects_query = "SELECT DISTINCT id FROM projects;" - projects = await conn.fetch_all(query=projects_query) - for project in projects: - project_id = project["id"] - logger.info(f"Processing project ID: {project_id}") - - # Update project statistics - await conn.execute( - """ - UPDATE projects - SET total_tasks = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id), - tasks_mapped = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 2), - tasks_validated = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 4), - tasks_bad_imagery = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 6) - WHERE id = :project_id; - """, - {"project_id": project_id}, - ) - - # Update user stats - await conn.execute( - """ - UPDATE users - SET projects_mapped = array_append(projects_mapped, :project_id) - WHERE id IN ( - SELECT DISTINCT user_id - FROM task_history - WHERE action = 'STATE_CHANGE' AND project_id = :project_id - ); - """, - {"project_id": project_id}, - ) - - logger.info("Finished updating project stats.") - - -async def update_recent_updated_project_stats(): - """ - Async function to update project statistics for the recently updated projects in the database. - """ - async with db_connection.database.connection() as conn: - logger.info("Started updating recently updated projects' project stats.") - - # Calculate the cutoff date for the past week - one_week_ago = datetime.datetime.utcnow() - datetime.timedelta(days=7) - - # Fetch projects updated in the past week - projects_query = """ - SELECT DISTINCT id - FROM projects - WHERE last_updated > :one_week_ago; - """ - projects = await conn.fetch_all( - query=projects_query, values={"one_week_ago": one_week_ago} - ) - for project in projects: - project_id = project["id"] - logger.info(f"Processing project ID: {project_id}") - - # Update project statistics - await conn.execute( - """ - UPDATE projects - SET total_tasks = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id), - tasks_mapped = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 2), - tasks_validated = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 4), - tasks_bad_imagery = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 6) - WHERE id = :project_id; - """, - {"project_id": project_id}, - ) - - # Update user stats - await conn.execute( - """ - UPDATE users - SET projects_mapped = - CASE - WHEN :project_id = ANY(projects_mapped) THEN projects_mapped - ELSE array_append(projects_mapped, :project_id) - END - WHERE id IN ( - SELECT DISTINCT user_id - FROM task_history - WHERE action = 'STATE_CHANGE' AND project_id = :project_id - ); - """, - {"project_id": project_id}, - ) - - logger.info("Finished updating project stats.") - - -def setup_cron_jobs(): - scheduler = AsyncIOScheduler() - - scheduler.add_job( - auto_unlock_tasks, - IntervalTrigger(minutes=120), - id="auto_unlock_tasks", - replace_existing=True, - ) - - scheduler.add_job( - update_all_project_stats, - CronTrigger(hour=0, minute=0), # Cron trigger for 12:00 AM - id="update_project_stats", - 
replace_existing=True, - ) - - scheduler.add_job( - update_recent_updated_project_stats, - CronTrigger(minute=0), # Cron trigger for every hour - id="update_recent_updated_project_stats", - replace_existing=True, - ) - scheduler.start() - logger.info("Scheduler initialized: auto_unlock_tasks runs every 2 hours.") diff --git a/backend/models/dtos/project_dto.py b/backend/models/dtos/project_dto.py index 8fe71c0f1f..e709a0e060 100644 --- a/backend/models/dtos/project_dto.py +++ b/backend/models/dtos/project_dto.py @@ -563,8 +563,6 @@ class ProjectSummary(BaseModel): class Config: populate_by_name = True - # TODO: Make Validators work. - # @field_validator('mapping_types', 'mapping_editors', 'validation_editors', mode='plain') # def validate_list_fields(cls, v, field): # print(field,'-----') diff --git a/backend/models/dtos/validator_dto.py b/backend/models/dtos/validator_dto.py index 97f1235edc..1b9b6bf2fb 100644 --- a/backend/models/dtos/validator_dto.py +++ b/backend/models/dtos/validator_dto.py @@ -203,6 +203,7 @@ class RevertUserTasksDTO(BaseModel): user_id: int action_by: int action: str + # TODO: Incorporate this validator. # action: ExtendedStringType = Field( # validators=[is_valid_revert_status], converters=[str.upper] diff --git a/backend/models/postgis/project.py b/backend/models/postgis/project.py index 14ff1a1713..4c4bdabd75 100644 --- a/backend/models/postgis/project.py +++ b/backend/models/postgis/project.py @@ -1516,21 +1516,6 @@ async def get_project_summary( return summary - # TODO Remove if not used. - # @staticmethod - # async def calculate_tasks_percent(status: str, project_id: int, db: Database) -> float: - # """Calculate the percentage of tasks with a given status for a project.""" - # query = f""" - # SELECT COUNT(*) - # FROM tasks - # WHERE project_id = :project_id AND status = :status - # """ - # total_tasks_query = "SELECT COUNT(*) FROM tasks WHERE project_id = :project_id" - - # total_tasks = await db.fetch_val(total_tasks_query, {"project_id": project_id}) - # status_tasks = await db.fetch_val(query, {"project_id": project_id, "status": status}) - # return (status_tasks / total_tasks) * 100 if total_tasks > 0 else 0.0 - @staticmethod async def get_dto_for_locale( project_id: int, preferred_locale: str, default_locale: str, db: Database diff --git a/backend/models/postgis/user.py b/backend/models/postgis/user.py index 731ed1e024..7abe9c45f9 100644 --- a/backend/models/postgis/user.py +++ b/backend/models/postgis/user.py @@ -309,7 +309,6 @@ async def upsert_mapped_projects(user_id: int, project_id: int, db: Database): """ await db.execute(query, values={"user_id": user_id, "project_id": project_id}) - # TODO Optimization: Get only project name instead of all the locale attributes. @staticmethod async def get_mapped_projects( user_id: int, preferred_locale: str, db: Database diff --git a/backend/services/mapping_service.py b/backend/services/mapping_service.py index 66c58af6dd..807b9f50d5 100644 --- a/backend/services/mapping_service.py +++ b/backend/services/mapping_service.py @@ -108,7 +108,7 @@ async def lock_task_for_mapping( user_can_map, error_reason = await ProjectService.is_user_permitted_to_map( lock_task_dto.project_id, lock_task_dto.user_id, db ) - # TODO Handle error exceptions.. + # TODO Handle error exceptions. 
if not user_can_map: if error_reason == MappingNotAllowed.USER_NOT_ACCEPTED_LICENSE: raise UserLicenseError("User must accept license to map this task") diff --git a/backend/services/mapswipe_service.py b/backend/services/mapswipe_service.py index 00ea242434..91d8dc4d59 100644 --- a/backend/services/mapswipe_service.py +++ b/backend/services/mapswipe_service.py @@ -1,7 +1,7 @@ import json import requests -from cachetools import TTLCache, cached +# from cachetools import TTLCache, cached from backend.exceptions import Conflict from backend.models.dtos.partner_stats_dto import ( @@ -18,8 +18,8 @@ UserGroupMemberDTO, ) -grouped_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24) -filtered_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24) +# grouped_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24) +# filtered_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24) MAPSWIPE_API_URL = "https://api.mapswipe.org/graphql/" @@ -290,7 +290,7 @@ def setup_filtered_dto( filtered_stats_dto.contributions_by_organization_name = organizations return filtered_stats_dto - @cached(grouped_partner_stats_cache) + # @cached(grouped_partner_stats_cache) def fetch_grouped_partner_stats( self, partner_id: int, @@ -316,7 +316,7 @@ def fetch_grouped_partner_stats( group_dto = self.setup_group_dto(partner_id, group_id, resp_body) return group_dto - @cached(filtered_partner_stats_cache) + # @cached(filtered_partner_stats_cache) def fetch_filtered_partner_stats( self, partner_id: str, diff --git a/backend/services/messaging/message_service.py b/backend/services/messaging/message_service.py index 9a513430dc..d7d85e40c4 100644 --- a/backend/services/messaging/message_service.py +++ b/backend/services/messaging/message_service.py @@ -4,7 +4,7 @@ from typing import List import bleach -from cachetools import TTLCache, cached +from cachetools import TTLCache from databases import Database from loguru import logger from markdown import markdown @@ -783,8 +783,8 @@ async def _parse_message_for_username( ) return list(set(usernames)) + # @cached(message_cache) @staticmethod - @cached(message_cache) async def has_user_new_messages(user_id: int, db: Database) -> dict: """Determines if the user has any unread messages""" count = await Notification.get_unread_message_count(user_id, db) diff --git a/backend/services/project_search_service.py b/backend/services/project_search_service.py index a7157937cc..43f7195317 100644 --- a/backend/services/project_search_service.py +++ b/backend/services/project_search_service.py @@ -4,7 +4,7 @@ import geojson import pandas as pd -from cachetools import TTLCache, cached +from aiocache import cached, Cache from databases import Database from fastapi import HTTPException from geoalchemy2 import shape @@ -34,8 +34,8 @@ ) from backend.services.users.user_service import UserService -search_cache = TTLCache(maxsize=128, ttl=300) -csv_download_cache = TTLCache(maxsize=16, ttl=600) +# search_cache = TTLCache(maxsize=128, ttl=300) +# csv_download_cache = TTLCache(maxsize=16, ttl=600) # max area allowed for passed in bbox, calculation shown to help future maintenance # client resolution (mpp)* arbitrary large map size on a large screen in pixels * 50% buffer, all squared @@ -238,11 +238,17 @@ async def get_total_contributions( return [row["total"] for row in result] + def csv_cache_key_builder(func, *args, **kwargs): + args_without_db = args[:-2] + return f"{func.__name__}:{args_without_db}:{kwargs}" + @staticmethod - # @cached(csv_download_cache) + 
@cached(cache=Cache.MEMORY, key_builder=csv_cache_key_builder, ttl=3600) async def search_projects_as_csv( search_dto: ProjectSearchDTO, user, db: Database, as_csv: bool = False ) -> str: + if user: + user = await UserService.get_user_by_id(user, db) all_results = await ProjectSearchService._filter_projects( search_dto, user, db, as_csv ) @@ -326,7 +332,7 @@ async def search_projects_as_csv( return df.to_csv(index=False) @staticmethod - @cached(search_cache) + # @cached(cache=Cache.MEMORY, key_builder=cache_key_builder, ttl=300) async def search_projects( search_dto: ProjectSearchDTO, user, db ) -> ProjectSearchResultsDTO: @@ -378,7 +384,6 @@ async def _filter_projects( ) # Initialize filter list and parameters dictionary filters = [] - if search_dto.preferred_locale or search_dto.text_search: subquery_filters = [] if search_dto.preferred_locale: diff --git a/backend/services/project_service.py b/backend/services/project_service.py index 9acd065613..f33d2057a6 100644 --- a/backend/services/project_service.py +++ b/backend/services/project_service.py @@ -2,7 +2,8 @@ from datetime import datetime, timedelta, timezone import geojson -from cachetools import TTLCache, cached + +# from cachetools import TTLCache, cached from databases import Database from fastapi import HTTPException from loguru import logger @@ -38,8 +39,10 @@ from backend.services.project_search_service import ProjectSearchService from backend.services.team_service import TeamService from backend.services.users.user_service import UserService +from aiocache import cached, Cache + -summary_cache = TTLCache(maxsize=1024, ttl=600) +# summary_cache = TTLCache(maxsize=1024, ttl=600) class ProjectServiceError(Exception): @@ -312,7 +315,6 @@ async def get_task_details_for_logged_in_user( if len(tasks) == 0: raise NotFound(sub_code="TASK_NOT_FOUND") - # TODO put the task details in to a DTO dtos = [] for task in tasks: dtos.append( @@ -520,8 +522,12 @@ async def is_user_permitted_to_validate( return True, "User allowed to validate" + def summary_cache_key_builder(func, *args, **kwargs): + args_without_db = args[:-1] + return f"{func.__name__}:{args_without_db}:{kwargs}" + @staticmethod - @cached(summary_cache) + @cached(cache=Cache.MEMORY, key_builder=summary_cache_key_builder, ttl=600) def get_cached_project_summary( project_id: int, preferred_locale: str = "en" ) -> ProjectSummary: @@ -647,7 +653,6 @@ async def get_featured_projects( ) for project, total_contributors in zip_items ] - # TODO Check if pagination needed. 
dto.pagination = None return dto @@ -672,8 +677,12 @@ def get_project_title(project_id: int, preferred_locale: str = "en") -> str: project = ProjectService.get_project_by_id(project_id) return project.get_project_title(preferred_locale) + def stats_cache_key_builder(func, *args, **kwargs): + args_without_db = args[:-1] + return f"{func.__name__}:{args_without_db}:{kwargs}" + @staticmethod - @cached(TTLCache(maxsize=1024, ttl=600)) + @cached(cache=Cache.MEMORY, key_builder=stats_cache_key_builder, ttl=600) async def get_project_stats(project_id: int, db: Database) -> ProjectStatsDTO: """Gets the project stats DTO""" project = await ProjectService.exists(project_id, db) diff --git a/backend/services/recommendation_service.py b/backend/services/recommendation_service.py index 2ffc90adc5..5a2abe2612 100644 --- a/backend/services/recommendation_service.py +++ b/backend/services/recommendation_service.py @@ -10,6 +10,7 @@ from backend.models.postgis.statuses import ProjectStatus from backend.services.project_search_service import ProjectSearchService from backend.services.users.user_service import UserService +from aiocache import cached, Cache similar_projects_cache = TTLCache(maxsize=1000, ttl=60 * 60 * 24) # 24 hours @@ -117,14 +118,17 @@ def get_similar_project_ids(all_projects_df, target_project_df): return similar_projects - # TODO: Cache + def matrix_cache_key_builder(func, *args, **kwargs): + # Remove the last two arguments + args_without_db = args[:-1] + return f"{func.__name__}:{args_without_db}:{kwargs}" + # This function is cached so that the matrix is not calculated every time # as it is expensive and not changing often - # # @cached(cache=similar_projects_cache) @staticmethod + @cached(cache=Cache.MEMORY, key_builder=matrix_cache_key_builder, ttl=3600) async def create_project_matrix(db: Database) -> pd.DataFrame: """Creates project matrix required to calculate similarity.""" - # Query to fetch all published projects with their related data query = """ SELECT p.id, p.default_locale, p.difficulty, p.mapping_types, p.country, diff --git a/backend/services/stats_service.py b/backend/services/stats_service.py index feb3cc7b44..b9a163db8e 100644 --- a/backend/services/stats_service.py +++ b/backend/services/stats_service.py @@ -30,6 +30,7 @@ from backend.services.project_search_service import ProjectSearchService from backend.services.project_service import ProjectService from backend.services.users.user_service import UserService +from aiocache import cached, Cache homepage_stats_cache = TTLCache(maxsize=4, ttl=30) @@ -441,14 +442,17 @@ async def get_user_contributions( return contrib_dto + def homepage_cache_key_builder(func, *args, **kwargs): + args_without_db = args[:-1] + return f"{func.__name__}:{args_without_db}:{kwargs}" + @staticmethod - @cached(homepage_stats_cache) + @cached(cache=Cache.MEMORY, key_builder=homepage_cache_key_builder, ttl=600) async def get_homepage_stats( abbrev: bool = True, db: Database = None ) -> HomePageStatsDTO: """Get overall TM stats to give community a feel for progress that's being made""" dto = HomePageStatsDTO() - # Total Projects query = select(func.count(Project.id)) dto.total_projects = await db.fetch_val(query) @@ -690,7 +694,12 @@ def set_task_stats(row): "bad_imagery": row["bad_imagery"], } + def cache_key_builder(func, *args, **kwargs): + args_without_first = args[1:] + return f"{func.__name__}:{args_without_first}:{kwargs}" + @staticmethod + @cached(cache=Cache.MEMORY, key_builder=cache_key_builder, ttl=300) async def get_task_stats( db: 
Database, start_date, @@ -702,7 +711,6 @@ async def get_task_stats( country=None, ): """Creates task stats for a period using the TaskStatsDTO""" - # Base query components base_query = """ WITH filtered_projects AS ( diff --git a/backend/services/users/user_service.py b/backend/services/users/user_service.py index d0e0491d1c..84146a6776 100644 --- a/backend/services/users/user_service.py +++ b/backend/services/users/user_service.py @@ -1,6 +1,6 @@ import datetime -from cachetools import TTLCache, cached +# from cachetools import TTLCache, cached from databases import Database from loguru import logger from sqlalchemy import and_, desc, distinct, func, insert, select @@ -38,7 +38,7 @@ settings = Settings() -user_filter_cache = TTLCache(maxsize=1024, ttl=600) +# user_filter_cache = TTLCache(maxsize=1024, ttl=600) class UserServiceError(Exception): @@ -545,7 +545,7 @@ async def get_all_users(query: UserSearchQuery, db: Database) -> UserSearchDTO: return await User.get_all_users(query, db) @staticmethod - @cached(user_filter_cache) + # @cached(user_filter_cache) async def filter_users( username: str, project_id: int, page: int, db: Database ) -> UserFilterDTO: diff --git a/pdm.lock b/pdm.lock index 569144b424..f852fa9f2b 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,7 +5,16 @@ groups = ["default", "dev", "lint", "test"] strategy = [] lock_version = "4.4.1" -content_hash = "sha256:0c3f5bf9e08b44fa7b24a65dc72716d4f297478e41d98d4de907c0c2ec9a823d" +content_hash = "sha256:6af7b056686ee691c97cd187e17063cc70d7d7fbd5eec2cd284fb91a09737881" + +[[package]] +name = "aiocache" +version = "0.12.3" +summary = "multi backend asyncio cache" +files = [ + {file = "aiocache-0.12.3-py2.py3-none-any.whl", hash = "sha256:889086fc24710f431937b87ad3720a289f7fc31c4fd8b68e9f918b9bacd8270d"}, + {file = "aiocache-0.12.3.tar.gz", hash = "sha256:f528b27bf4d436b497a1d0d1a8f59a542c153ab1e37c3621713cb376d44c4713"}, +] [[package]] name = "aiosmtplib" @@ -22,7 +31,6 @@ name = "alembic" version = "1.11.1" requires_python = ">=3.7" summary = "A database migration tool for SQLAlchemy." -groups = ["default"] dependencies = [ "Mako", "SQLAlchemy>=1.3.0", @@ -64,7 +72,6 @@ name = "apscheduler" version = "3.10.1" requires_python = ">=3.6" summary = "In-process task scheduler with Cron-like capabilities" -groups = ["default"] dependencies = [ "pytz", "setuptools>=0.7", @@ -127,7 +134,6 @@ name = "black" version = "23.7.0" requires_python = ">=3.8" summary = "The uncompromising code formatter." -groups = ["lint"] dependencies = [ "click>=8.0.0", "mypy-extensions>=0.4.3", @@ -135,6 +141,7 @@ dependencies = [ "pathspec>=0.9.0", "platformdirs>=2", "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=3.10.0.0; python_version < \"3.10\"", ] files = [ {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, @@ -161,7 +168,6 @@ name = "bleach" version = "6.0.0" requires_python = ">=3.7" summary = "An easy safelist-based HTML-sanitizing tool." 
-groups = ["default"] dependencies = [ "six>=1.9.0", "webencodings", @@ -186,7 +192,6 @@ name = "cachetools" version = "5.3.1" requires_python = ">=3.7" summary = "Extensible memoizing collections and decorators" -groups = ["default"] files = [ {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, @@ -197,7 +202,6 @@ name = "certifi" version = "2024.2.2" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." -groups = ["default"] files = [ {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, @@ -208,7 +212,6 @@ name = "charset-normalizer" version = "3.3.2" requires_python = ">=3.7.0" summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -groups = ["default"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -274,7 +277,6 @@ name = "coverage" version = "7.2.7" requires_python = ">=3.7" summary = "Code coverage measurement for Python" -groups = ["test"] files = [ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, @@ -352,8 +354,6 @@ name = "exceptiongroup" version = "1.2.1" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" -groups = ["test"] -marker = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, @@ -398,7 +398,6 @@ name = "flake8" version = "6.1.0" requires_python = ">=3.8.1" summary = "the modular source code checker: pep8 pyflakes and co" -groups = ["lint"] dependencies = [ "mccabe<0.8.0,>=0.7.0", "pycodestyle<2.12.0,>=2.11.0", @@ -414,7 +413,6 @@ name = "geoalchemy2" version = "0.14.3" requires_python = ">=3.7" summary = "Using SQLAlchemy with Spatial Databases" -groups = ["default"] dependencies = [ "SQLAlchemy>=1.4", "packaging", @@ -429,7 +427,6 @@ name = "geojson" version = "3.1.0" requires_python = ">=3.7" summary = "Python bindings and utilities for GeoJSON" -groups = ["default"] files = [ {file = "geojson-3.1.0-py3-none-any.whl", hash = "sha256:68a9771827237adb8c0c71f8527509c8f5bef61733aa434cefc9c9d4f0ebe8f3"}, {file = "geojson-3.1.0.tar.gz", hash = "sha256:58a7fa40727ea058efc28b0e9ff0099eadf6d0965e04690830208d3ef571adac"}, @@ -440,7 +437,6 @@ name = "gevent" version = "22.10.2" requires_python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5" summary = "Coroutine-based network library" -groups = ["default"] dependencies = [ "greenlet>=2.0.0; platform_python_implementation == \"CPython\"", "setuptools", @@ -478,7 +474,6 @@ name = "greenlet" version = "2.0.2" 
requires_python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" summary = "Lightweight in-process concurrent programming" -groups = ["default"] files = [ {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, @@ -514,7 +509,6 @@ name = "gunicorn" version = "20.1.0" requires_python = ">=3.5" summary = "WSGI HTTP Server for UNIX" -groups = ["default"] dependencies = [ "setuptools>=3.0", ] @@ -529,7 +523,6 @@ version = "20.1.0" extras = ["gevent"] requires_python = ">=3.5" summary = "WSGI HTTP Server for UNIX" -groups = ["default"] dependencies = [ "gevent>=1.4.0", "gunicorn==20.1.0", @@ -554,7 +547,6 @@ name = "idna" version = "3.7" requires_python = ">=3.5" summary = "Internationalized Domain Names in Applications (IDNA)" -groups = ["default"] files = [ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, @@ -565,7 +557,6 @@ name = "importlib-metadata" version = "6.8.0" requires_python = ">=3.8" summary = "Read metadata from Python packages" -groups = ["default"] dependencies = [ "zipp>=0.5", ] @@ -579,7 +570,6 @@ name = "iniconfig" version = "2.0.0" requires_python = ">=3.7" summary = "brain-dead simple config-ini parsing" -groups = ["test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -590,7 +580,6 @@ name = "itsdangerous" version = "2.1.2" requires_python = ">=3.7" summary = "Safely pass data to untrusted environments and back." -groups = ["default"] files = [ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, @@ -634,10 +623,8 @@ name = "mako" version = "1.3.3" requires_python = ">=3.8" summary = "A super-fast templating language that borrows the best ideas from the existing templating languages." -groups = ["default"] dependencies = [ "MarkupSafe>=0.9.2", - "importlib-metadata; python_version < \"3.8\"", ] files = [ {file = "Mako-1.3.3-py3-none-any.whl", hash = "sha256:5324b88089a8978bf76d1629774fcc2f1c07b82acdf00f4c5dd8ceadfffc4b40"}, @@ -649,6 +636,9 @@ name = "markdown" version = "3.4.4" requires_python = ">=3.7" summary = "Python implementation of John Gruber's Markdown." +dependencies = [ + "importlib-metadata>=4.4; python_version < \"3.10\"", +] files = [ {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, @@ -659,7 +649,6 @@ name = "markupsafe" version = "2.1.5" requires_python = ">=3.7" summary = "Safely add untrusted strings to HTML/XML markup." 
-groups = ["default"] files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -699,7 +688,6 @@ name = "mccabe" version = "0.7.0" requires_python = ">=3.6" summary = "McCabe checker, plugin for flake8" -groups = ["lint"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -710,7 +698,6 @@ name = "mypy-extensions" version = "1.0.0" requires_python = ">=3.5" summary = "Type system extensions for programs checked with the mypy type checker." -groups = ["lint"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -721,7 +708,6 @@ name = "newrelic" version = "8.8.0" requires_python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" summary = "New Relic Python Agent" -groups = ["default"] files = [ {file = "newrelic-8.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b4db0e7544232d4e6e835a02ee28637970576f8dce82ffcaa3d675246e822d5"}, {file = "newrelic-8.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9355f209ba8d82fd0f9d78d7cc1d9bef0ae4677b3cfed7b7aaec521adbe87559"}, @@ -737,7 +723,6 @@ name = "numpy" version = "1.26.4" requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" -groups = ["default"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -774,7 +759,6 @@ name = "oauthlib" version = "3.2.2" requires_python = ">=3.6" summary = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -groups = ["default"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -785,7 +769,6 @@ name = "packaging" version = "24.0" requires_python = ">=3.7" summary = "Core utilities for Python packages" -groups = ["default", "lint", "test"] files = [ {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, @@ -796,7 +779,6 @@ name = "pandas" version = "2.2.2" requires_python = ">=3.9" summary = "Powerful data structures for data analysis, time series, and statistics" -groups = ["default"] dependencies = [ "numpy>=1.22.4; python_version < \"3.11\"", "python-dateutil>=2.8.2", @@ -833,7 +815,6 @@ name = "pathspec" version = "0.12.1" requires_python = ">=3.8" summary = "Utility library for gitignore style pattern matching of file paths." 
-groups = ["lint"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -854,10 +835,6 @@ name = "pluggy" version = "1.5.0" requires_python = ">=3.8" summary = "plugin and hook calling mechanisms for python" -groups = ["test"] -dependencies = [ - "importlib-metadata>=0.12; python_version < \"3.8\"", -] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -868,7 +845,6 @@ name = "pycodestyle" version = "2.11.1" requires_python = ">=3.8" summary = "Python style guide checker" -groups = ["lint"] files = [ {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, @@ -987,7 +963,6 @@ name = "pyflakes" version = "3.1.0" requires_python = ">=3.8" summary = "passive checker of Python programs" -groups = ["lint"] files = [ {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, @@ -1037,7 +1012,6 @@ name = "pytest" version = "7.4.0" requires_python = ">=3.7" summary = "pytest: simple powerful testing with Python" -groups = ["test"] dependencies = [ "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", "iniconfig", @@ -1055,7 +1029,6 @@ name = "python-dateutil" version = "2.8.2" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Extensions to the standard Python datetime module" -groups = ["default"] dependencies = [ "six>=1.5", ] @@ -1069,7 +1042,6 @@ name = "python-dotenv" version = "1.0.0" requires_python = ">=3.8" summary = "Read key-value pairs from a .env file and set them as environment variables" -groups = ["default"] files = [ {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, @@ -1080,7 +1052,6 @@ name = "python-slugify" version = "8.0.1" requires_python = ">=3.7" summary = "A Python slugify application that also handles Unicode" -groups = ["default"] dependencies = [ "text-unidecode>=1.3", ] @@ -1093,7 +1064,6 @@ files = [ name = "pytz" version = "2024.1" summary = "World timezone definitions, modern and historical" -groups = ["default"] files = [ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, @@ -1104,7 +1074,6 @@ name = "requests" version = "2.31.0" requires_python = ">=3.7" summary = "Python HTTP for Humans." -groups = ["default"] dependencies = [ "certifi>=2017.4.17", "charset-normalizer<4,>=2", @@ -1121,7 +1090,6 @@ name = "requests-oauthlib" version = "1.3.1" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" summary = "OAuthlib authentication support for Requests." 
-groups = ["default"] dependencies = [ "oauthlib>=3.0.0", "requests>=2.0.0", @@ -1136,7 +1104,6 @@ name = "scikit-learn" version = "1.4.2" requires_python = ">=3.9" summary = "A set of python modules for machine learning and data mining" -groups = ["default"] dependencies = [ "joblib>=1.2.0", "numpy>=1.19.5", @@ -1167,7 +1134,6 @@ name = "scipy" version = "1.13.0" requires_python = ">=3.9" summary = "Fundamental algorithms for scientific computing in Python" -groups = ["default"] dependencies = [ "numpy<2.3,>=1.22.4", ] @@ -1197,7 +1163,6 @@ files = [ name = "sentry-sdk" version = "1.26.0" summary = "Python client for Sentry (https://sentry.io)" -groups = ["default"] dependencies = [ "certifi", "urllib3>=1.26.11; python_version >= \"3.6\"", @@ -1212,7 +1177,6 @@ name = "sentry-sdk" version = "1.26.0" extras = ["fastapi"] summary = "Python client for Sentry (https://sentry.io)" -groups = ["default"] dependencies = [ "fastapi>=0.79.0", "sentry-sdk==1.26.0", @@ -1227,7 +1191,6 @@ name = "setuptools" version = "69.5.1" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" -groups = ["default"] files = [ {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, @@ -1238,7 +1201,6 @@ name = "shapely" version = "2.0.1" requires_python = ">=3.7" summary = "Manipulation and analysis of geometric objects" -groups = ["default"] dependencies = [ "numpy>=1.14", ] @@ -1274,7 +1236,6 @@ name = "six" version = "1.16.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" summary = "Python 2 and 3 compatibility utilities" -groups = ["default"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -1295,7 +1256,6 @@ name = "sqlalchemy" version = "2.0.19" requires_python = ">=3.7" summary = "Database Abstraction Library" -groups = ["default"] dependencies = [ "greenlet!=0.4.17; platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\"", "typing-extensions>=4.2.0", @@ -1336,6 +1296,7 @@ requires_python = ">=3.8" summary = "The little ASGI library that shines." 
dependencies = [ "anyio<5,>=3.4.0", + "typing-extensions>=3.10.0; python_version < \"3.10\"", ] files = [ {file = "starlette-0.32.0.post1-py3-none-any.whl", hash = "sha256:cd0cb10ddb49313f609cedfac62c8c12e56c7314b66d89bb077ba228bada1b09"}, @@ -1346,7 +1307,6 @@ files = [ name = "text-unidecode" version = "1.3" summary = "The most basic Text::Unidecode port" -groups = ["default"] files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, @@ -1357,7 +1317,6 @@ name = "threadpoolctl" version = "3.5.0" requires_python = ">=3.8" summary = "threadpoolctl" -groups = ["default"] files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -1368,8 +1327,6 @@ name = "tomli" version = "2.0.1" requires_python = ">=3.7" summary = "A lil' TOML parser" -groups = ["lint", "test"] -marker = "python_version < \"3.11\"" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -1390,7 +1347,6 @@ name = "tzdata" version = "2024.1" requires_python = ">=2" summary = "Provider of IANA time zone data" -groups = ["default"] files = [ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, @@ -1411,7 +1367,6 @@ name = "urllib3" version = "2.2.1" requires_python = ">=3.8" summary = "HTTP library with thread-safe connection pooling, file post, and more." -groups = ["default"] files = [ {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, @@ -1435,7 +1390,6 @@ files = [ name = "webencodings" version = "0.5.1" summary = "Character encoding aliases for legacy web content" -groups = ["default"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -1446,7 +1400,6 @@ name = "werkzeug" version = "2.3.6" requires_python = ">=3.8" summary = "The comprehensive WSGI web application library." 
-groups = ["default"] dependencies = [ "MarkupSafe>=2.1.1", ] @@ -1460,7 +1413,6 @@ name = "zipp" version = "3.18.1" requires_python = ">=3.8" summary = "Backport of pathlib-compatible object wrapper for zip files" -groups = ["default"] files = [ {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, @@ -1471,7 +1423,6 @@ name = "zope-event" version = "5.0" requires_python = ">=3.7" summary = "Very basic event publishing system" -groups = ["default"] dependencies = [ "setuptools", ] @@ -1485,7 +1436,6 @@ name = "zope-interface" version = "6.3" requires_python = ">=3.7" summary = "Interfaces for Python" -groups = ["default"] dependencies = [ "setuptools", ] diff --git a/pyproject.toml b/pyproject.toml index 8927b13246..2adbbf53b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,6 +43,7 @@ dependencies = [ "newrelic==8.8.0", "databases>=0.9.0", "fastapi-mail==1.4.1", + "aiocache>=0.12.3", ] requires-python = ">=3.9,<=3.11" readme = "README.md"