
Commit

Merge pull request #1724 from ChildMindInstitute/release/2025.02.0
Release/2025.02.0 [main]
ChaconC authored Feb 6, 2025
2 parents cc49ed0 + 119cd2b commit 5520355
Showing 33 changed files with 780 additions and 945 deletions.
11 changes: 0 additions & 11 deletions .env.default
@@ -82,17 +82,6 @@ RABBITMQ__URL=rabbitmq
# Secret key for data encryption. Use this key only for local development
SECRETS__SECRET_KEY=0eb7f5d4c1367199c21e9a2ec793b5a481b60fe2af24464bcb18ac7fa48a645f

# Opentelemetry local settings
OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST=".*"
OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED=true
OTEL_SERVICE_NAME=mindlogger_backend
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=
# Variables below need only for AWS X-Ray
# OTEL_PROPAGATORS=
# OTEL_PYTHON_ID_GENERATOR=
# OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://otel:4317
# OTEL_TRACES_EXPORTER
# OTEL_EXPORTER_OTLP_CERTIFICATE

MULTI_INFORMANT__TEMP_RELATION_EXPIRY_SECS=86400

19 changes: 0 additions & 19 deletions Pipfile
@@ -19,25 +19,6 @@ httpx = "==0.28.*"
jinja2 = "==3.1.*"
more-itertools = "==10.5.0"
nh3 = "==0.2.20"
opentelemetry-api = "==1.27.0"
opentelemetry-distro = "==0.48b0"
opentelemetry-exporter-otlp = "==1.27.0"
opentelemetry-instrumentation = "==0.48b0"
opentelemetry-instrumentation-asgi = "==0.48b0"
opentelemetry-instrumentation-asyncio = "==0.48b0"
opentelemetry-instrumentation-dbapi = "==0.48b0"
opentelemetry-instrumentation-fastapi = "==0.48b0"
opentelemetry-instrumentation-logging = "==0.48b0"
opentelemetry-instrumentation-sqlite3 = "==0.48b0"
opentelemetry-instrumentation-tortoiseorm = "==0.48b0"
opentelemetry-instrumentation-urllib = "==0.48b0"
opentelemetry-instrumentation-wsgi = "==0.48b0"
opentelemetry-propagator-aws-xray = "==1.0.2"
opentelemetry-sdk = "==1.27.0"
opentelemetry-sdk-extension-aws = "==2.0.2"
opentelemetry-semantic-conventions = "==0.48b0"
opentelemetry-test-utils = "==0.48b0"
opentelemetry-util-http = "==0.48b0"
pydantic = { extras = ["email"], version = "==1.10.18" }
pyjwt = "==2.10.1"
pymongo = "==4.10.1"
1,175 changes: 493 additions & 682 deletions Pipfile.lock

Large diffs are not rendered by default.

19 changes: 0 additions & 19 deletions README.md
@@ -758,22 +758,3 @@ In case of Azure blob, specify your connection string into field `storage_secret
Common Public Attribution License Version 1.0 (CPAL-1.0)

Refer to [LICENSE.md](./LICENSE.MD)

## OpenTelemetry
### If the app is running in Docker
- Make sure that `OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://opentelemetry:4317` is already set in `.env`, then start the OpenTelemetry container:
```bash
docker-compose up -d opentelemetry
```
### If the app is running locally
- Make sure that `OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317` is exported in your environment:
```bash
export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317
```
or, if you use pipenv to autoload environment variables, make sure that `OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317` is added to the `.env` file.
- As with the containerized app, start the OpenTelemetry container:
```bash
docker-compose up -d opentelemetry
```
- Start your app
21 changes: 14 additions & 7 deletions compose/fastapi/Dockerfile
@@ -40,23 +40,30 @@ COPY ./ ./
COPY --chown=code:code ./compose/fastapi/entrypoint /fastapi-entrypoint
RUN sed -i 's/\r$//g' /fastapi-entrypoint && chmod +x /fastapi-entrypoint

COPY --chown=code:code ./compose/fastapi/start /fastapi-start
RUN sed -i 's/\r$//g' /fastapi-start && chmod +x /fastapi-start

COPY --chown=code:code ./compose/fastapi/start-datadog /fastapi-start-datadog
RUN sed -i 's/\r$//g' /fastapi-start-datadog && chmod +x /fastapi-start-datadog


# Copy to /fastapi-start and /fastapi-start-datadog so the existing ECS task config keeps working
COPY --chown=code:code ./compose/fastapi/start-backend-datadog /start-backend-datadog
RUN sed -i 's/\r$//g' /start-backend-datadog && chmod +x /start-backend-datadog && \
    cp /start-backend-datadog /fastapi-start && cp /start-backend-datadog /fastapi-start-datadog

# Migration script
COPY --chown=code:code ./compose/fastapi/migrate /fastapi-migrate
RUN sed -i 's/\r$//g' /fastapi-migrate && chmod +x /fastapi-migrate

# ECS
COPY --chown=code:code ./compose/fastapi/ecs-start /ecs-start
RUN sed -i 's/\r$//g' /ecs-start && chmod +x /ecs-start

COPY --chown=code:code ./compose/fastapi/ecs-start-feature /ecs-start-feature
RUN sed -i 's/\r$//g' /ecs-start-feature && chmod +x /ecs-start-feature

# Scheduler and worker
COPY --chown=code:code ./compose/fastapi/start-worker /start-worker
RUN sed -i 's/\r$//g' /start-worker && chmod +x /start-worker

COPY --chown=code:code ./compose/fastapi/start-scheduler /start-scheduler
RUN sed -i 's/\r$//g' /start-scheduler && chmod +x /start-scheduler


# Select internal user
USER code

3 changes: 3 additions & 0 deletions compose/fastapi/ecs-start
@@ -1,5 +1,8 @@
#!/bin/bash

# Use this script to start on ECS and run a migration
# May be deprecated

set -eo pipefail
set -o nounset

4 changes: 3 additions & 1 deletion compose/fastapi/ecs-start-feature
@@ -1,7 +1,9 @@
#!/bin/bash

# Startup script for feature branches on ECS Fargate with DataDog enabled

set -eo pipefail
set -o nounset

ENV=testing /fastapi-migrate
/fastapi-start-datadog
/start-backend-datadog
16 changes: 0 additions & 16 deletions compose/fastapi/start

This file was deleted.

20 changes: 20 additions & 0 deletions compose/fastapi/start-backend-datadog
@@ -0,0 +1,20 @@
#!/bin/bash

if [ "${DD_TRACE_ENABLED}" == "true" ]; then
TOKEN=$(curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600") && \
export DD_AGENT_HOST=$(curl http://169.254.169.254/latest/meta-data/local-ipv4 -H "X-aws-ec2-metadata-token: $TOKEN")
export LOG_JSON_FORMAT=true
fi

set -o errexit
set -o pipefail
set -o nounset

# https://www.uvicorn.org/settings/
export UVICORN_HOST="0.0.0.0"
export UVICORN_PORT=80

uvicorn main:app \
--host ${UVICORN_HOST} --port ${UVICORN_PORT} \
--reload --proxy-headers \
--log-config uvicorn_disable_logging.json
21 changes: 0 additions & 21 deletions compose/fastapi/start-datadog

This file was deleted.

13 changes: 13 additions & 0 deletions compose/fastapi/start-scheduler
@@ -0,0 +1,13 @@
#!/bin/bash

if [ "${DD_TRACE_ENABLED}" == "true" ]; then
TOKEN=$(curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600") && \
export DD_AGENT_HOST=$(curl http://169.254.169.254/latest/meta-data/local-ipv4 -H "X-aws-ec2-metadata-token: $TOKEN")
export LOG_JSON_FORMAT=true
fi

set -o errexit
set -o pipefail
set -o nounset

taskiq scheduler scheduler:scheduler -fsd
13 changes: 13 additions & 0 deletions compose/fastapi/start-worker
@@ -0,0 +1,13 @@
#!/bin/bash

if [ "${DD_TRACE_ENABLED}" == "true" ]; then
TOKEN=$(curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600") && \
export DD_AGENT_HOST=$(curl http://169.254.169.254/latest/meta-data/local-ipv4 -H "X-aws-ec2-metadata-token: $TOKEN")
export LOG_JSON_FORMAT=true
fi

set -o errexit
set -o pipefail
set -o nounset

taskiq worker worker:worker -fsd
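The three new start scripts share the same Datadog bootstrap: when `DD_TRACE_ENABLED` is true, they request an IMDSv2 session token and use it to read the instance's local IPv4 address, which becomes `DD_AGENT_HOST`. A rough Python equivalent of that lookup, shown for illustration only (the scripts themselves do this in bash with `curl`):

```python
import os

import requests

IMDS = "http://169.254.169.254/latest"


def resolve_dd_agent_host() -> str:
    # IMDSv2: obtain a short-lived session token, then query instance metadata with it.
    token = requests.put(
        f"{IMDS}/api/token",
        headers={"X-aws-ec2-metadata-token-ttl-seconds": "21600"},
        timeout=2,
    ).text
    return requests.get(
        f"{IMDS}/meta-data/local-ipv4",
        headers={"X-aws-ec2-metadata-token": token},
        timeout=2,
    ).text


if os.getenv("DD_TRACE_ENABLED") == "true":
    os.environ["DD_AGENT_HOST"] = resolve_dd_agent_host()
    os.environ["LOG_JSON_FORMAT"] = "true"
```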
27 changes: 0 additions & 27 deletions compose/opentelemetry/otel-collector-config.yaml

This file was deleted.

17 changes: 5 additions & 12 deletions docker-compose.yaml
@@ -44,7 +44,7 @@ services:
args:
- PIPENV_EXTRA_ARGS=--dev
entrypoint: /fastapi-entrypoint
command: /fastapi-start
command: /start-backend-datadog
env_file: .env
depends_on:
- postgres
@@ -68,7 +68,7 @@
image: mindlogger_worker
container_name: mindlogger_worker
entrypoint: /fastapi-entrypoint
command: [ 'taskiq', 'worker', 'broker:broker', '-fsd' ]
command: ['/start-worker']
env_file: .env
volumes:
- ./:/app/
@@ -85,10 +85,10 @@
target: base
args:
- PIPENV_EXTRA_ARGS=--dev
image: mindlogger_webapp
image: mindlogger_scheduler
container_name: mindlogger_scheduler
entrypoint: /fastapi-entrypoint
command: [ 'taskiq', 'scheduler', 'scheduler:scheduler', '-fsd' ]
command: ['/start-scheduler']
env_file: .env
volumes:
- ./:/app/
@@ -128,14 +128,7 @@ services:
- './compose/minio:/etc/minio'
entrypoint: /etc/minio/create_bucket.sh

opentelemetry:
image: otel/opentelemetry-collector
container_name: mindlogger_opentelemetry
command: ["--config=/etc/otel-collector-config.yaml"]
volumes:
- "./compose/opentelemetry/otel-collector-config.yaml:/etc/otel-collector-config.yaml"
ports:
- "4317:4317"


volumes:
pg_data: {}
1 change: 1 addition & 0 deletions src/apps/integrations/domain.py
@@ -9,6 +9,7 @@

class AvailableIntegrations(str, Enum):
    LORIS = "LORIS"
    PROLIFIC = "PROLIFIC"
    FUTURE = "FUTURE"


Empty file.
17 changes: 17 additions & 0 deletions src/apps/integrations/prolific/domain.py
@@ -0,0 +1,17 @@
import json

from pydantic import BaseModel

from apps.integrations.db.schemas import IntegrationsSchema


class ProlificIntegration(BaseModel):
    api_key: str

    @classmethod
    def from_schema(cls, schema: IntegrationsSchema):
        configuration = json.loads(schema.configuration.replace("'", '"'))
        return cls(api_key=configuration["api_key"])

    def __repr__(self):
        return "ProlificIntegration()"
7 changes: 7 additions & 0 deletions src/apps/integrations/prolific/errors.py
@@ -0,0 +1,7 @@
from gettext import gettext as _

from apps.shared.exception import UnauthorizedError


class ProlificInvalidApiTokenError(UnauthorizedError):
    message = _("Prolific token is invalid.")
Empty file.
39 changes: 39 additions & 0 deletions src/apps/integrations/prolific/service/prolific.py
@@ -0,0 +1,39 @@
import uuid

import requests

from apps.integrations.crud.integrations import IntegrationsCRUD
from apps.integrations.db.schemas import IntegrationsSchema
from apps.integrations.domain import AvailableIntegrations
from apps.integrations.prolific.domain import ProlificIntegration
from apps.integrations.prolific.errors import ProlificInvalidApiTokenError
from apps.users.domain import User


class ProlificIntegrationService:
    def __init__(self, applet_id: uuid.UUID, session, user: User) -> None:
        self.applet_id = applet_id
        self.session = session
        self.user = user
        self.type = AvailableIntegrations.PROLIFIC

    async def create_prolific_integration(self, api_key: str) -> ProlificIntegration:
        prolific_response = requests.get(
            "https://api.prolific.com/api/v1/users/me/",
            headers={"Authorization": f"Token {api_key}", "Content-Type": "application/json"},
        )

        if prolific_response.status_code != 200:
            raise ProlificInvalidApiTokenError()

        integration_schema = await IntegrationsCRUD(self.session).create(
            IntegrationsSchema(
                applet_id=self.applet_id,
                type=self.type,
                configuration={
                    "api_key": api_key,
                },
            )
        )

        return ProlificIntegration.from_schema(integration_schema)
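A minimal usage sketch for the new service, assuming the session and user come from the app's existing dependency wiring; the handler name and call site below are illustrative, not part of this commit:

```python
import uuid

from apps.integrations.prolific.service.prolific import ProlificIntegrationService
from apps.users.domain import User


async def enable_prolific_integration(applet_id: uuid.UUID, api_key: str, session, user: User):
    # Validates the API key against Prolific and persists the integration row.
    service = ProlificIntegrationService(applet_id=applet_id, session=session, user=user)
    return await service.create_prolific_integration(api_key=api_key)
```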
