Skip to content

Commit

Permalink
Merge pull request #6735 from hotosm/feat/aiocache
Browse files Browse the repository at this point in the history
feat: aiocache and caching implementation
  • Loading branch information
prabinoid authored Feb 7, 2025
2 parents e3292fe + d21ed02 commit 19efab9
Show file tree
Hide file tree
Showing 18 changed files with 75 additions and 269 deletions.
2 changes: 0 additions & 2 deletions backend/api/comments/resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,8 +195,6 @@ async def delete(


@router.post("/{project_id}/comments/tasks/{task_id}/")
# TODO Decorator
# @tm.pm_only(False)
async def post(
request: Request,
project_id: int,
Expand Down
3 changes: 2 additions & 1 deletion backend/api/projects/resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -746,7 +746,6 @@ async def get(
if user_id:
user = await UserService.get_user_by_id(user_id, db)
search_dto = setup_search_dto(request)

if search_dto.omit_map_results and search_dto.download_as_csv:
return JSONResponse(
content={
Expand Down Expand Up @@ -793,6 +792,8 @@ async def get(
return JSONResponse(content={"Error": error_msg}, status_code=401)

if search_dto.download_as_csv:
if user:
user = user.id
all_results_csv = await ProjectSearchService.search_projects_as_csv(
search_dto, user, db, True
)
Expand Down
1 change: 1 addition & 0 deletions backend/api/tasks/statistics.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from backend.services.stats_service import StatsService
from backend.services.users.authentication_service import login_required


router = APIRouter(
prefix="/tasks",
tags=["tasks"],
Expand Down
154 changes: 0 additions & 154 deletions backend/cron.py

This file was deleted.

2 changes: 0 additions & 2 deletions backend/models/dtos/project_dto.py
Original file line number Diff line number Diff line change
Expand Up @@ -563,8 +563,6 @@ class ProjectSummary(BaseModel):
class Config:
populate_by_name = True

# TODO: Make Validators work.

# @field_validator('mapping_types', 'mapping_editors', 'validation_editors', mode='plain')
# def validate_list_fields(cls, v, field):
# print(field,'-----')
Expand Down
1 change: 1 addition & 0 deletions backend/models/dtos/validator_dto.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ class RevertUserTasksDTO(BaseModel):
user_id: int
action_by: int
action: str

# TODO: Incorporate this validator.
# action: ExtendedStringType = Field(
# validators=[is_valid_revert_status], converters=[str.upper]
Expand Down
15 changes: 0 additions & 15 deletions backend/models/postgis/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -1516,21 +1516,6 @@ async def get_project_summary(

return summary

# TODO Remove if not used.
# @staticmethod
# async def calculate_tasks_percent(status: str, project_id: int, db: Database) -> float:
# """Calculate the percentage of tasks with a given status for a project."""
# query = f"""
# SELECT COUNT(*)
# FROM tasks
# WHERE project_id = :project_id AND status = :status
# """
# total_tasks_query = "SELECT COUNT(*) FROM tasks WHERE project_id = :project_id"

# total_tasks = await db.fetch_val(total_tasks_query, {"project_id": project_id})
# status_tasks = await db.fetch_val(query, {"project_id": project_id, "status": status})
# return (status_tasks / total_tasks) * 100 if total_tasks > 0 else 0.0

@staticmethod
async def get_dto_for_locale(
project_id: int, preferred_locale: str, default_locale: str, db: Database
Expand Down
1 change: 0 additions & 1 deletion backend/models/postgis/user.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,7 +309,6 @@ async def upsert_mapped_projects(user_id: int, project_id: int, db: Database):
"""
await db.execute(query, values={"user_id": user_id, "project_id": project_id})

# TODO Optimization: Get only project name instead of all the locale attributes.
@staticmethod
async def get_mapped_projects(
user_id: int, preferred_locale: str, db: Database
Expand Down
2 changes: 1 addition & 1 deletion backend/services/mapping_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ async def lock_task_for_mapping(
user_can_map, error_reason = await ProjectService.is_user_permitted_to_map(
lock_task_dto.project_id, lock_task_dto.user_id, db
)
# TODO Handle error exceptions..
# TODO Handle error exceptions.
if not user_can_map:
if error_reason == MappingNotAllowed.USER_NOT_ACCEPTED_LICENSE:
raise UserLicenseError("User must accept license to map this task")
Expand Down
10 changes: 5 additions & 5 deletions backend/services/mapswipe_service.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import json

import requests
from cachetools import TTLCache, cached
# from cachetools import TTLCache, cached

from backend.exceptions import Conflict
from backend.models.dtos.partner_stats_dto import (
Expand All @@ -18,8 +18,8 @@
UserGroupMemberDTO,
)

grouped_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24)
filtered_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24)
# grouped_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24)
# filtered_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24)
MAPSWIPE_API_URL = "https://api.mapswipe.org/graphql/"


Expand Down Expand Up @@ -290,7 +290,7 @@ def setup_filtered_dto(
filtered_stats_dto.contributions_by_organization_name = organizations
return filtered_stats_dto

@cached(grouped_partner_stats_cache)
# @cached(grouped_partner_stats_cache)
def fetch_grouped_partner_stats(
self,
partner_id: int,
Expand All @@ -316,7 +316,7 @@ def fetch_grouped_partner_stats(
group_dto = self.setup_group_dto(partner_id, group_id, resp_body)
return group_dto

@cached(filtered_partner_stats_cache)
# @cached(filtered_partner_stats_cache)
def fetch_filtered_partner_stats(
self,
partner_id: str,
Expand Down
4 changes: 2 additions & 2 deletions backend/services/messaging/message_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from typing import List

import bleach
from cachetools import TTLCache, cached
from cachetools import TTLCache
from databases import Database
from loguru import logger
from markdown import markdown
Expand Down Expand Up @@ -783,8 +783,8 @@ async def _parse_message_for_username(
)
return list(set(usernames))

# @cached(message_cache)
@staticmethod
@cached(message_cache)
async def has_user_new_messages(user_id: int, db: Database) -> dict:
"""Determines if the user has any unread messages"""
count = await Notification.get_unread_message_count(user_id, db)
Expand Down
17 changes: 11 additions & 6 deletions backend/services/project_search_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import geojson
import pandas as pd
from cachetools import TTLCache, cached
from aiocache import cached, Cache
from databases import Database
from fastapi import HTTPException
from geoalchemy2 import shape
Expand Down Expand Up @@ -34,8 +34,8 @@
)
from backend.services.users.user_service import UserService

search_cache = TTLCache(maxsize=128, ttl=300)
csv_download_cache = TTLCache(maxsize=16, ttl=600)
# search_cache = TTLCache(maxsize=128, ttl=300)
# csv_download_cache = TTLCache(maxsize=16, ttl=600)

# max area allowed for passed in bbox, calculation shown to help future maintenance
# client resolution (mpp)* arbitrary large map size on a large screen in pixels * 50% buffer, all squared
Expand Down Expand Up @@ -238,11 +238,17 @@ async def get_total_contributions(

return [row["total"] for row in result]

def csv_cache_key_builder(func, *args, **kwargs):
    """Build an aiocache key for the CSV project-search endpoint.

    The last two positional arguments are dropped before building the key,
    so the non-hashable/non-stable trailing arguments (presumably the
    ``db`` connection and the ``as_csv`` flag — confirm against the call
    site) do not end up in the cache key.
    """
    cacheable_args = args[:-2]
    return "{}:{}:{}".format(func.__name__, cacheable_args, kwargs)

@staticmethod
# @cached(csv_download_cache)
@cached(cache=Cache.MEMORY, key_builder=csv_cache_key_builder, ttl=3600)
async def search_projects_as_csv(
search_dto: ProjectSearchDTO, user, db: Database, as_csv: bool = False
) -> str:
if user:
user = await UserService.get_user_by_id(user, db)
all_results = await ProjectSearchService._filter_projects(
search_dto, user, db, as_csv
)
Expand Down Expand Up @@ -326,7 +332,7 @@ async def search_projects_as_csv(
return df.to_csv(index=False)

@staticmethod
@cached(search_cache)
# @cached(cache=Cache.MEMORY, key_builder=cache_key_builder, ttl=300)
async def search_projects(
search_dto: ProjectSearchDTO, user, db
) -> ProjectSearchResultsDTO:
Expand Down Expand Up @@ -378,7 +384,6 @@ async def _filter_projects(
)
# Initialize filter list and parameters dictionary
filters = []

if search_dto.preferred_locale or search_dto.text_search:
subquery_filters = []
if search_dto.preferred_locale:
Expand Down
Loading

0 comments on commit 19efab9

Please sign in to comment.