diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a43c4af48..424f3d933 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,7 +14,6 @@ jobs: lint: runs-on: ubuntu-latest steps: - - name: Checkout Code Repository uses: actions/checkout@v2 @@ -33,40 +32,38 @@ jobs: test: runs-on: ubuntu-latest steps: - - name: Checkout Code Repository uses: actions/checkout@v2 - name: Build the Stack - run: docker-compose build + run: docker-compose build - name: Run the Stack - run: docker-compose up -d + run: docker-compose up -d - name: Make DB Migrations - run: docker-compose run --rm web python manage.py migrate + run: docker-compose run --rm web python manage.py migrate - name: Run Django Tests - run: docker-compose run --rm web pytest --cov=app + run: docker-compose run --rm web pytest --cov=app - name: Tear down the Stack - run: docker-compose down + run: docker-compose down check-migrations: runs-on: ubuntu-latest steps: - - name: Checkout Code Repository uses: actions/checkout@v2 - name: Build the Stack - run: docker-compose build + run: docker-compose build - name: Run the Stack - run: docker-compose up -d + run: docker-compose up -d - name: Check for unstaged migrations - run: docker-compose run --rm web python manage.py makemigrations --check --no-input + run: docker-compose run --rm web python manage.py makemigrations --check --no-input - name: Tear down the Stack - run: docker-compose down \ No newline at end of file + run: docker-compose down diff --git a/.gitignore b/.gitignore index 5bcf46ad2..83aec8514 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,5 @@ mysite.log .coverage celerybeat-schedule + +**/target/ diff --git a/app/content/serializers/user.py b/app/content/serializers/user.py index fb7be4982..7c41beb9e 100644 --- a/app/content/serializers/user.py +++ b/app/content/serializers/user.py @@ -20,7 +20,6 @@ class Meta: "first_name", "last_name", "image", - "email", "gender", "study", "studyyear", @@ -30,7 
+29,6 @@ class Meta: "first_name", "last_name", "image", - "email", "gender", "study", "studyyear", diff --git a/app/content/util/event_utils.py b/app/content/util/event_utils.py index 034b98cc4..f60ac0805 100644 --- a/app/content/util/event_utils.py +++ b/app/content/util/event_utils.py @@ -17,50 +17,49 @@ def create_payment_order(event, request, registration): and creates a new Vipps payment order. """ - if event.is_paid_event: - access_token = os.environ.get("PAYMENT_ACCESS_TOKEN") - expires_at = os.environ.get("PAYMENT_ACCESS_TOKEN_EXPIRES_AT") - if not access_token or datetime.now() >= datetime.fromtimestamp( - int(expires_at) - ): - (expires_at, access_token) = get_new_access_token() - os.environ.update({"PAYMENT_ACCESS_TOKEN": access_token}) - os.environ.update({"PAYMENT_ACCESS_TOKEN_EXPIRES_AT": str(expires_at)}) + access_token = os.environ.get("PAYMENT_ACCESS_TOKEN") + expires_at = os.environ.get("PAYMENT_ACCESS_TOKEN_EXPIRES_AT") + if not access_token or datetime.now() >= datetime.fromtimestamp( + int(expires_at) + ): + (expires_at, access_token) = get_new_access_token() + os.environ.update({"PAYMENT_ACCESS_TOKEN": access_token}) + os.environ.update({"PAYMENT_ACCESS_TOKEN_EXPIRES_AT": str(expires_at)}) - prev_orders = Order.objects.filter(event=event, user=request.user) - has_paid_order = False + prev_orders = Order.objects.filter(event=event, user=request.user) + has_paid_order = False - for order in prev_orders: - if ( - order.status == OrderStatus.CAPTURE - or order.status == OrderStatus.RESERVE - or order.status == OrderStatus.SALE - ): - has_paid_order = True - break + for order in prev_orders: + if ( + order.status == OrderStatus.CAPTURE + or order.status == OrderStatus.RESERVE + or order.status == OrderStatus.SALE + ): + has_paid_order = True + break - if not has_paid_order: + if not has_paid_order: - paytime = event.paid_information.paytime + paytime = event.paid_information.paytime - expire_date = datetime.now() + timedelta( - hours=paytime.hour, 
minutes=paytime.minute, seconds=paytime.second - ) + expire_date = datetime.now() + timedelta( + hours=paytime.hour, minutes=paytime.minute, seconds=paytime.second + ) - # Create Order - order_id = uuid.uuid4() - amount = int(event.paid_information.price * 100) - res = initiate_payment(amount, str(order_id), event.title, access_token) - payment_link = res["url"] - order = Order.objects.create( - order_id=order_id, - user=request.user, - event=event, - payment_link=payment_link, - expire_date=expire_date, - ) - order.save() - check_if_has_paid.apply_async( - args=(order.order_id, registration.registration_id), - countdown=(paytime.hour * 60 + paytime.minute) * 60 + paytime.second, - ) + # Create Order + order_id = uuid.uuid4() + amount = int(event.paid_information.price * 100) + res = initiate_payment(amount, str(order_id), event.title, access_token) + payment_link = res["url"] + order = Order.objects.create( + order_id=order_id, + user=request.user, + event=event, + payment_link=payment_link, + expire_date=expire_date, + ) + order.save() + check_if_has_paid.apply_async( + args=(order.order_id, registration.registration_id), + countdown=(paytime.hour * 60 + paytime.minute) * 60 + paytime.second, + ) diff --git a/app/content/views/registration.py b/app/content/views/registration.py index d899e2031..6c8dcb2ee 100644 --- a/app/content/views/registration.py +++ b/app/content/views/registration.py @@ -65,15 +65,16 @@ def create(self, request, *args, **kwargs): serializer, event=event, user=request.user ) - try: - create_payment_order(event, request, registration) - except Exception as e: - registration.delete() - raise e - - registration_serializer = RegistrationSerializer( - registration, context={"user": registration.user} - ) + if event.is_paid_event: + try: + create_payment_order(event, request, registration) + except Exception as e: + registration.delete() + raise e + + registration_serializer = RegistrationSerializer( + registration, context={"user": 
registration.user} + ) return Response(registration_serializer.data, status=status.HTTP_201_CREATED) diff --git a/app/settings.py b/app/settings.py index a6b0176ac..32ec17320 100644 --- a/app/settings.py +++ b/app/settings.py @@ -77,6 +77,7 @@ # greater consistency between gunicorn and `./manage.py runserver`. See: # http://whitenoise.evans.io/en/stable/django.html#using-whitenoise-in-development "whitenoise.runserver_nostatic", + "django_prometheus", "django.contrib.staticfiles", # Third party "rest_framework", @@ -128,6 +129,7 @@ } MIDDLEWARE = [ + "django_prometheus.middleware.PrometheusBeforeMiddleware", # Django Cors Headers "corsheaders.middleware.CorsMiddleware", "django.middleware.common.CommonMiddleware", @@ -141,6 +143,7 @@ "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "django.middleware.locale.LocaleMiddleware", + "django_prometheus.middleware.PrometheusAfterMiddleware", ] ROOT_URLCONF = "app.urls" @@ -170,7 +173,7 @@ DATABASES = { "default": { - "ENGINE": "django.db.backends.mysql", + "ENGINE": "django_prometheus.db.backends.mysql", "NAME": os.environ.get("DATABASE_NAME"), "USER": os.environ.get("DATABASE_USER"), "PASSWORD": os.environ.get("DATABASE_PASSWORD"), @@ -285,3 +288,14 @@ CELERY_BROKER_URL = "amqp://guest:guest@rabbitmq:5672" if ENVIRONMENT == EnvironmentOptions.LOCAL: CELERY_TASK_ALWAYS_EAGER = False + +PROMETHEUS_EXPORT_MIGRATIONS = os.environ.get("PROMETHEUS_EXPORT_MIGRATIONS", True) + +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://redis:6379", + "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, + "KEY_PREFIX": "leptos", + } +} diff --git a/app/urls.py b/app/urls.py index 2b540d666..2a11d1221 100644 --- a/app/urls.py +++ b/app/urls.py @@ -31,4 +31,5 @@ path("forms/", include("app.forms.urls")), path("galleries/", include("app.gallery.urls")), path("badges/", include("app.badge.urls")), + path("", 
include("django_prometheus.urls")), ] diff --git a/compose/Dockerfile b/compose/Dockerfile index 3e7579d93..5cf40b352 100644 --- a/compose/Dockerfile +++ b/compose/Dockerfile @@ -18,7 +18,8 @@ RUN apt-get update \ # Requirements are installed here to ensure they will be cached. COPY ./requirements.txt ./ -RUN pip install -r requirements.txt +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir -r requirements.txt COPY . . diff --git a/docker-compose.yml b/docker-compose.yml index 6bf689022..236bb2930 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,7 +5,7 @@ services: image: mysql:8.0 container_name: db platform: linux/amd64 - restart: always + restart: unless-stopped ports: - "3306:3306" environment: @@ -19,7 +19,7 @@ services: depends_on: - db image: phpmyadmin/phpmyadmin - restart: always + restart: unless-stopped ports: - "8080:80" environment: @@ -35,7 +35,7 @@ services: - ./.envs/.local image: web container_name: web - restart: on-failure + restart: unless-stopped platform: linux/amd64 volumes: - .:/usr/src/ @@ -57,3 +57,30 @@ services: container_name: rabbitmq ports: - 5672:5672 + + prometheus: + image: prom/prometheus + restart: "no" + volumes: + - prometheus_data:/prometheus + - ./prometheus.yml:/etc/prometheus/prometheus.yml + ports: + - 9090:9090 + depends_on: + - web + + grafana: + image: grafana/grafana + environment: + GF_INSTALL_PLUGINS: "grafana-clock-panel,grafana-simple-json-datasource" + restart: "no" + volumes: + - grafana_data:/var/lib/grafana + ports: + - 3000:3000 + depends_on: + - prometheus + +volumes: + prometheus_data: {} + grafana_data: {} diff --git a/grafana/alerting/1/__default__.tmpl b/grafana/alerting/1/__default__.tmpl new file mode 100644 index 000000000..b8633d168 --- /dev/null +++ b/grafana/alerting/1/__default__.tmpl @@ -0,0 +1,53 @@ + +{{ define "__subject" }}[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ if gt (.Alerts.Resolved | len) 0 }}, RESOLVED:{{ 
.Alerts.Resolved | len }}{{ end }}{{ end }}] {{ .GroupLabels.SortedPairs.Values | join " " }} {{ if gt (len .CommonLabels) (len .GroupLabels) }}({{ with .CommonLabels.Remove .GroupLabels.Names }}{{ .Values | join " " }}{{ end }}){{ end }}{{ end }} + +{{ define "__text_values_list" }}{{ if len .Values }}{{ $first := true }}{{ range $refID, $value := .Values -}} +{{ if $first }}{{ $first = false }}{{ else }}, {{ end }}{{ $refID }}={{ $value }}{{ end -}} +{{ else }}[no value]{{ end }}{{ end }} + +{{ define "__text_alert_list" }}{{ range . }} +Value: {{ template "__text_values_list" . }} +Labels: +{{ range .Labels.SortedPairs }} - {{ .Name }} = {{ .Value }} +{{ end }}Annotations: +{{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }} +{{ end }}{{ if gt (len .GeneratorURL) 0 }}Source: {{ .GeneratorURL }} +{{ end }}{{ if gt (len .SilenceURL) 0 }}Silence: {{ .SilenceURL }} +{{ end }}{{ if gt (len .DashboardURL) 0 }}Dashboard: {{ .DashboardURL }} +{{ end }}{{ if gt (len .PanelURL) 0 }}Panel: {{ .PanelURL }} +{{ end }}{{ end }}{{ end }} + +{{ define "default.title" }}{{ template "__subject" . }}{{ end }} + +{{ define "default.message" }}{{ if gt (len .Alerts.Firing) 0 }}**Firing** +{{ template "__text_alert_list" .Alerts.Firing }}{{ if gt (len .Alerts.Resolved) 0 }} + +{{ end }}{{ end }}{{ if gt (len .Alerts.Resolved) 0 }}**Resolved** +{{ template "__text_alert_list" .Alerts.Resolved }}{{ end }}{{ end }} + + +{{ define "__teams_text_alert_list" }}{{ range . }} +Value: {{ template "__text_values_list" . 
}} +Labels: +{{ range .Labels.SortedPairs }} - {{ .Name }} = {{ .Value }} +{{ end }} +Annotations: +{{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }} +{{ end }} +{{ if gt (len .GeneratorURL) 0 }}Source: [{{ .GeneratorURL }}]({{ .GeneratorURL }}) + +{{ end }}{{ if gt (len .SilenceURL) 0 }}Silence: [{{ .SilenceURL }}]({{ .SilenceURL }}) + +{{ end }}{{ if gt (len .DashboardURL) 0 }}Dashboard: [{{ .DashboardURL }}]({{ .DashboardURL }}) + +{{ end }}{{ if gt (len .PanelURL) 0 }}Panel: [{{ .PanelURL }}]({{ .PanelURL }}) + +{{ end }} +{{ end }}{{ end }} + + +{{ define "teams.default.message" }}{{ if gt (len .Alerts.Firing) 0 }}**Firing** +{{ template "__teams_text_alert_list" .Alerts.Firing }}{{ if gt (len .Alerts.Resolved) 0 }} + +{{ end }}{{ end }}{{ if gt (len .Alerts.Resolved) 0 }}**Resolved** +{{ template "__teams_text_alert_list" .Alerts.Resolved }}{{ end }}{{ end }} diff --git a/grafana/grafana.db b/grafana/grafana.db new file mode 100644 index 000000000..2c7189994 Binary files /dev/null and b/grafana/grafana.db differ diff --git a/profiling/stress_tester/Cargo.toml b/profiling/stress_tester/Cargo.toml new file mode 100644 index 000000000..90c4d1332 --- /dev/null +++ b/profiling/stress_tester/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "stress_tester" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +reqwest = "0.11.22" +serde = { version = "1.0.189", features = ["derive"] } +serde_json = "1.0.107" +tokio = { version = "1.33.0", features = ["full"] } diff --git a/profiling/stress_tester/src/main.rs b/profiling/stress_tester/src/main.rs new file mode 100644 index 000000000..c4c3d0f9b --- /dev/null +++ b/profiling/stress_tester/src/main.rs @@ -0,0 +1,78 @@ +use reqwest::{header, ClientBuilder}; +use reqwest::{Client, Result}; +use serde::{Deserialize, Serialize}; +use std::time::Duration; + +const API_URL: &str = "http://localhost:8000"; +fn 
api(str: &str) -> String { + format!("{}{}", API_URL, str) +} + +#[tokio::main] +async fn main() -> Result<()> { + let timeout = Duration::new(20, 0); + + let client = ClientBuilder::new().timeout(timeout).build()?; + + let admin_token = get_token(&client, &User::new("index", "index123")).await?; + + Ok(()) +} + +#[derive(Debug, Serialize, Deserialize)] +struct User { + user_id: String, + password: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct Token { + token: String, +} + +struct UserClient { + token: Token, + user: User, +} + +impl User { + fn new(username: &str, password: &str) -> Self { + Self { + user_id: username.to_string(), + password: password.to_string(), + } + } +} + +async fn create_user(client: &Client, admin_token: Token, user: &User) -> Result<()> { + let res = client.post(api("/users")); + Ok(()) +} + +async fn get_token(client: &Client, user: &User) -> Result<Token> { + let res = client + .post(api("/auth/login")) + .headers(get_headers()) + .body(serde_json::to_string(user).unwrap()) + .send() + .await? 
+ .text() + .await?; + + dbg!(&res); + + let token = serde_json::from_str::<Token>(&res).unwrap(); + + Ok(token) +} + +fn get_headers() -> header::HeaderMap { + let mut headers = header::HeaderMap::new(); + headers.insert( + header::CONTENT_TYPE, + header::HeaderValue::from_static("application/json"), + ); + headers.insert(header::CONNECTION, "keep-alive".parse().unwrap()); + + headers +} diff --git a/prometheus.yml b/prometheus.yml new file mode 100644 index 000000000..f38b88b24 --- /dev/null +++ b/prometheus.yml @@ -0,0 +1,9 @@ +# global: +# scrape_interval: 5s +# evaluation_interval: 5s +# +scrape_configs: + - job_name: "lepton" + static_configs: + - targets: ["localhost:8000"] + - targets: ["docker.for.mac.localhost:8000"] diff --git a/requirements.txt b/requirements.txt index 89dc0a813..1771ccf2c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,6 +34,8 @@ django-rest-polymorphic == 0.1.9 django-mptt == 0.14.0 +django-prometheus == 2.3.1 + # Code quality # ------------------------------------------------------------------------------ pylint