From a0b6af88a8de789a32b8d2a9106c966d3eb4fbdf Mon Sep 17 00:00:00 2001
From: Ashley Felton
Date: Mon, 14 Oct 2024 11:37:30 +0800
Subject: [PATCH 1/4] Add logging, include errors list in JSON endpoint.

---
 status.py | 77 ++++++++++++++++++++++++++++++++++++++++++++-----------
 1 file changed, 62 insertions(+), 15 deletions(-)

diff --git a/status.py b/status.py
index 4b25601..1c7d28e 100644
--- a/status.py
+++ b/status.py
@@ -1,5 +1,7 @@
 import json
+import logging
 import os
+import sys
 from datetime import datetime
 from zoneinfo import ZoneInfo
 
@@ -16,6 +18,15 @@
 
 app = application = Bottle()
 
+# Configure logging.
+LOGGER = logging.getLogger()
+LOGGER.setLevel(logging.INFO)
+formatter = logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
+handler = logging.StreamHandler(sys.stdout)
+handler.setLevel(logging.INFO)
+handler.setFormatter(formatter)
+LOGGER.addHandler(handler)
+
 TZ = ZoneInfo(os.environ.get("TZ", "Australia/Perth"))
 
 OUTPUT_TEMPLATE = """
@@ -83,10 +94,7 @@
 
 def healthcheck():
     """Query HTTP sources and derive a dictionary of response successes."""
-    d = {
-        "server_time": datetime.now().astimezone(TZ).isoformat(timespec="seconds"),
-        "success": True,
-    }
+    d = {"server_time": datetime.now().astimezone(TZ).isoformat(timespec="seconds"), "success": True, "errors": []}
 
     session = get_session()
 
@@ -99,7 +107,10 @@
         d["latest_point_delay"] = trackingdata["objects"][0]["age_minutes"]
         if trackingdata["objects"][0]["age_minutes"] > TRACKING_POINTS_MAX_DELAY:
             d["success"] = False
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying Resource Tracking: {SSS_DEVICES_URL}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying Resource Tracking: {SSS_DEVICES_URL}")
         d["latest_point"] = None
         d["latest_point_delay"] = None
         d["success"] = False
@@ -113,7 +124,10 @@
         d["iridium_latest_point_delay"] = trackingdata["objects"][0]["age_minutes"]
         if trackingdata["objects"][0]["age_minutes"] > TRACKING_POINTS_MAX_DELAY:
             d["success"] = False
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying Resource Tracking: {SSS_IRIDIUM_URL}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying Resource Tracking: {SSS_IRIDIUM_URL}")
         d["iridium_latest_point"] = None
         d["iridium_latest_point_delay"] = None
         d["success"] = False
@@ -125,7 +139,10 @@
         t = datetime.fromisoformat(trackingdata["objects"][0]["seen"]).astimezone(TZ)
         d["tracplus_latest_point"] = t.isoformat()
         d["tracplus_latest_point_delay"] = trackingdata["objects"][0]["age_minutes"]
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying Resource Tracking: {SSS_TRACPLUS_URL}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying Resource Tracking: {SSS_TRACPLUS_URL}")
         d["tracplus_latest_point"] = None
         d["tracplus_latest_point_delay"] = None
         d["success"] = False
@@ -137,7 +154,10 @@
         t = datetime.fromisoformat(trackingdata["objects"][0]["seen"]).astimezone(TZ)
         d["dfes_latest_point"] = t.isoformat()
         d["dfes_latest_point_delay"] = trackingdata["objects"][0]["age_minutes"]
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying Resource Tracking: {SSS_DFES_URL}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying Resource Tracking: {SSS_DFES_URL}")
         d["dfes_latest_point"] = None
         d["dfes_latest_point_delay"] = None
         d["success"] = False
@@ -151,7 +171,10 @@
         d["fleetcare_latest_point_delay"] = trackingdata["objects"][0]["age_minutes"]
         if trackingdata["objects"][0]["age_minutes"] > TRACKING_POINTS_MAX_DELAY:
             d["success"] = False
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying Resource Tracking: {SSS_FLEETCARE_URL}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying Resource Tracking: {SSS_FLEETCARE_URL}")
         d["fleetcare_latest_point"] = None
         d["fleetcare_latest_point_delay"] = None
         d["success"] = False
@@ -161,7 +184,10 @@
         resp.raise_for_status()
         j = resp.json()
         d["csw_catalogue_count"] = len(j)
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying CSW API: {CSW_API}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying CSW API: {CSW_API}")
         d["csw_catalogue_count"] = None
         d["success"] = False
 
@@ -179,7 +205,10 @@
         root = ET.fromstring(resp.content)
         resp_d = {i[0]: i[1] for i in root.items()}
         d["todays_burns_count"] = int(resp_d["numberOfFeatures"])
-    except Exception:
+    except Exception as e:
+        LOGGER.warning("Error querying KMI WFS (public:todays_burns)")
+        LOGGER.warning(e)
+        d["errors"].append("Error querying KMI WFS (public:todays_burns)")
         d["todays_burns_count"] = None
         d["success"] = False
 
@@ -191,7 +220,10 @@
         ns = {"wmts": "http://www.opengis.net/wmts/1.0", "ows": "http://www.opengis.net/ows/1.1"}
         layers = root.findall(".//wmts:Layer", ns)
         d["kmi_wmts_layer_count"] = len(layers)
-    except Exception:
+    except Exception as e:
+        LOGGER.warning("Error querying KMI WMTS layer count")
+        LOGGER.warning(e)
+        d["errors"].append("Error querying KMI WMTS layer count")
         d["kmi_wmts_layer_count"] = None
         d["success"] = False
 
@@ -200,7 +232,10 @@
         resp.raise_for_status()
         j = resp.json()
         d["bfrs_profile_api_endpoint"] = True
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying BFRS API endpoint: {BFRS_URL}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying BFRS API endpoint: {BFRS_URL}")
         d["bfrs_profile_api_endpoint"] = None
         d["success"] = False
 
@@ -259,7 +294,10 @@
         resp.raise_for_status()
         j = resp.json()
         d["auth2_status"] = j["healthy"]
-    except Exception:
+    except Exception as e:
+        LOGGER.warning(f"Error querying Auth2 status API endpoint: {AUTH2_STATUS_URL}")
+        LOGGER.warning(e)
+        d["errors"].append(f"Error querying Auth2 status API endpoint: {AUTH2_STATUS_URL}")
         d["auth2_status"] = None
         d["success"] = False
 
@@ -273,7 +311,16 @@ def healthcheck_json():
     if CACHE_RESPONSE:
         # Mark response as "cache for 60 seconds".
        response.set_header("Cache-Control", "max-age=60")
-    return json.dumps(d)
+
+    try:
+        return json.dumps(d)
+    except Exception as e:
+        LOGGER.warning("Error serialising healthcheck response as JSON")
+        LOGGER.warning(e)
+        return {
+            "server_time": datetime.now().astimezone(TZ).isoformat(timespec="seconds"),
+            "success": False,
+        }
 
 
 # Retain legacy health check route for PRTG.

From c4485bb1d1182d86739b3ecea9560aac0d406271 Mon Sep 17 00:00:00 2001
From: Ashley Felton
Date: Mon, 14 Oct 2024 11:38:01 +0800
Subject: [PATCH 2/4] Increment project patch version, bump project
 dependencies.

---
 kustomize/overlays/prod/kustomization.yaml | 46 +++++++++++-----------
 poetry.lock                                | 14 +++----
 pyproject.toml                             |  6 +--
 3 files changed, 33 insertions(+), 33 deletions(-)

diff --git a/kustomize/overlays/prod/kustomization.yaml b/kustomize/overlays/prod/kustomization.yaml
index 5ea21ad..7c015ae 100644
--- a/kustomize/overlays/prod/kustomization.yaml
+++ b/kustomize/overlays/prod/kustomization.yaml
@@ -1,23 +1,23 @@
-apiVersion: kustomize.config.k8s.io/v1beta1
-kind: Kustomization
-nameSuffix: -prod
-resources:
-  - ../../base
-  - ingress.yaml
-  - pdb.yaml
-secretGenerator:
-  - name: healthcheck-env
-    type: Opaque
-    envs:
-      - .env
-labels:
-  - includeSelectors: true
-    pairs:
-      variant: prod
-patches:
-  - path: deployment_patch.yaml
-  - path: deployment_hpa_patch.yaml
-  - path: service_patch.yaml
-images:
-  - name: ghcr.io/dbca-wa/healthcheck
-    newTag: 1.3.2
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+nameSuffix: -prod
+resources:
+  - ../../base
+  - ingress.yaml
+  - pdb.yaml
+secretGenerator:
+  - name: healthcheck-env
+    type: Opaque
+    envs:
+      - .env
+labels:
+  - includeSelectors: true
+    pairs:
+      variant: prod
+patches:
+  - path: deployment_patch.yaml
+  - path: deployment_hpa_patch.yaml
+  - path: service_patch.yaml
+images:
+  - name: ghcr.io/dbca-wa/healthcheck
+    newTag: 1.3.3
diff --git a/poetry.lock b/poetry.lock
index 64de5a3..3b33428 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -20,13 +20,13 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
 
 [[package]]
 name = "bottle"
-version = "0.13.1"
+version = "0.13.2"
 description = "Fast and simple WSGI-framework for small web-applications."
 optional = false
 python-versions = "*"
 files = [
-    {file = "bottle-0.13.1-py2.py3-none-any.whl", hash = "sha256:d5e068ad0b4ed3422231ad59bd9ea646a141f57a9c90587212d63477ec04fe96"},
-    {file = "bottle-0.13.1.tar.gz", hash = "sha256:a48852dc7a051353d3e4de3dd5590cd25de370bcfd94a72237561e314ceb0c88"},
+    {file = "bottle-0.13.2-py2.py3-none-any.whl", hash = "sha256:27569ab8d1332fbba3e400b3baab2227ab4efb4882ff147af05a7c00ed73409c"},
+    {file = "bottle-0.13.2.tar.gz", hash = "sha256:e53803b9d298c7d343d00ba7d27b0059415f04b9f6f40b8d58b5bf914ba9d348"},
 ]
 
 [[package]]
@@ -510,13 +510,13 @@ cli = ["click (>=5.0)"]
 
 [[package]]
 name = "pytz"
-version = "2024.1"
+version = "2024.2"
 description = "World timezone definitions, modern and historical"
 optional = false
 python-versions = "*"
 files = [
-    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
-    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+    {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
+    {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
 ]
 
 [[package]]
@@ -698,4 +698,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.12"
-content-hash = "1022457ee5899e29890c8dd4acf0223d0fb21a49e6758b3ed90a3be3b090f4f5"
+content-hash = "86cb4f7384f46c8e74912170661dbe410dc6c2b7bc829dd57321fc5314b8ae31"
diff --git a/pyproject.toml b/pyproject.toml
index 35bb160..b9c52ac 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,15 +1,15 @@
 [tool.poetry]
 name = "healthcheck"
-version = "1.3.2"
+version = "1.3.3"
 description = "Internal service endpoint health check for Spatial Support System"
 authors = ["ASI "]
 package-mode = false
 
 [tool.poetry.dependencies]
 python = "^3.12"
-bottle = "0.13.1"
+bottle = "0.13.2"
 requests = "2.32.3"
-pytz = "2024.1"
+pytz = "2024.2"
 python-dotenv = "1.0.1"
 gunicorn = "23.0.0"
 humanize = "4.11.0"

From 1047e81bcf8127d1ed1e8a7550ce92004b993487 Mon Sep 17 00:00:00 2001
From: Ashley Felton
Date: Mon, 14 Oct 2024 11:41:00 +0800
Subject: [PATCH 3/4] Add secret scan to GH workflows.

---
 .github/dependabot.yml            |  2 --
 .github/workflows/secret-scan.yml | 25 +++++++++++++++++++++++++
 2 files changed, 25 insertions(+), 2 deletions(-)
 create mode 100644 .github/workflows/secret-scan.yml

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 5a9fb03..645c171 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,8 +5,6 @@ updates:
     schedule:
       interval: "weekly"
   - package-ecosystem: "github-actions"
-    # Workflow files stored in the
-    # default location of `.github/workflows`
     directory: "/"
     schedule:
       interval: "weekly"
diff --git a/.github/workflows/secret-scan.yml b/.github/workflows/secret-scan.yml
new file mode 100644
index 0000000..d19177f
--- /dev/null
+++ b/.github/workflows/secret-scan.yml
@@ -0,0 +1,25 @@
+name: "Scan project for secrets & sensitive information"
+
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+    branches:
+      - master
+
+jobs:
+  secret-scan:
+    name: Scan project for secrets
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Secret scanning
+        uses: trufflesecurity/trufflehog@main
+        with:
+          base: ""
+          head: ${{ github.ref_name }}
+          extra_args: --only-verified

From e6382e7a21488202816fad993b1e218c2aebb57a Mon Sep 17 00:00:00 2001
From: Ashley Felton
Date: Mon, 14 Oct 2024 11:43:41 +0800
Subject: [PATCH 4/4] Add image vulnerability scan to workflow.

---
 .github/workflows/image-build-scan.yml | 33 ++++++++++++++++++++++++++++-----
 1 file changed, 28 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/image-build-scan.yml b/.github/workflows/image-build-scan.yml
index b942fed..b50d1a0 100644
--- a/.github/workflows/image-build-scan.yml
+++ b/.github/workflows/image-build-scan.yml
@@ -21,14 +21,14 @@ jobs:
       contents: read
       packages: write
       security-events: write
-      # Only required for workflows in private repositories
-      actions: read
     steps:
       #----------------------------------------------
       # Checkout repo
       #----------------------------------------------
       - name: Checkout repository
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
       #----------------------------------------------
       # Set up Docker BuildX environment
       #----------------------------------------------
@@ -64,6 +64,29 @@
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-      # NOTE: code scanning is not available for private repos without using
-      # GitHub Enterprise Cloud. Reference:
-      # https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning-for-a-repository
+
+  scan:
+    name: Image vulnerability scan
+    runs-on: ubuntu-latest
+    needs: [build]
+    permissions:
+      contents: read
+      packages: read
+      security-events: write
+    steps:
+      #----------------------------------------------
+      # Run vulnerability scan on built image
+      #----------------------------------------------
+      - name: Run Trivy vulnerability scanner
+        uses: aquasecurity/trivy-action@master
+        with:
+          scan-type: 'image'
+          scanners: 'vuln'
+          image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          vuln-type: 'os,library'
+          severity: 'HIGH,CRITICAL'
+          format: 'sarif'
+          output: 'trivy-results.sarif'
+      - name: Upload Trivy scan results to GitHub Security tab
+        uses: github/codeql-action/upload-sarif@v3
+        with:
+          sarif_file: 'trivy-results.sarif'
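
A closing note on PATCH 1/4: the eight new try/except blocks in healthcheck() all repeat the same pattern (log a warning, append a message to d["errors"], set d["success"] = False, then null the source's keys). One possible follow-up consolidation is sketched below; check_source and its label/fetch parameters are hypothetical names for illustration only, not code from the patches above.

import logging

LOGGER = logging.getLogger()  # The root logger configured in PATCH 1/4.


def check_source(d: dict, label: str, fetch):
    """Sketch only (hypothetical helper): call fetch() and return its result.
    On any failure, log a warning, record the error string in d["errors"],
    and flag the overall health check as unsuccessful."""
    try:
        return fetch()
    except Exception as e:
        LOGGER.warning(label)
        LOGGER.warning(e)
        d["errors"].append(label)
        d["success"] = False
        return None


# Illustrative usage inside healthcheck(), mirroring the SSS_DEVICES_URL block;
# each call site would still null its own keys when None is returned, and the
# per-source age_minutes checks would stay at the call site:
# trackingdata = check_source(
#     d,
#     f"Error querying Resource Tracking: {SSS_DEVICES_URL}",
#     lambda: session.get(SSS_DEVICES_URL).json(),
# )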