From 3cd28c734ab01dd62100cc4d9d1d2edc223da604 Mon Sep 17 00:00:00 2001 From: Margaret Lawson Date: Tue, 3 Dec 2024 22:03:31 +0000 Subject: [PATCH] Move code pulls earlier Run-GHA: true Required-githooks: true Signed-off-by: Margaret Lawson --- .../actions/import-commit-message/action.yml | 24 + .github/workflows/bash_unit_testing.yml | 32 - .github/workflows/ci2.yml | 152 ---- .github/workflows/create_release.yml | 25 - .github/workflows/gcp-builds.yml | 25 +- .github/workflows/landing-builds.yml | 398 --------- .github/workflows/linting.yml | 223 ----- .github/workflows/ossf-scorecard.yml | 76 -- .github/workflows/pr-metadata.yml | 44 - .../workflows/rpm-build-and-test-report.yml | 120 --- .github/workflows/rpm-build-and-test.yml | 789 ------------------ .github/workflows/trivy.yml | 77 -- .github/workflows/unit-testing.yml | 30 - .github/workflows/version-checks.yml | 34 - 14 files changed, 41 insertions(+), 2008 deletions(-) create mode 100644 .github/actions/import-commit-message/action.yml delete mode 100644 .github/workflows/bash_unit_testing.yml delete mode 100644 .github/workflows/ci2.yml delete mode 100644 .github/workflows/create_release.yml delete mode 100644 .github/workflows/landing-builds.yml delete mode 100644 .github/workflows/linting.yml delete mode 100644 .github/workflows/ossf-scorecard.yml delete mode 100644 .github/workflows/pr-metadata.yml delete mode 100644 .github/workflows/rpm-build-and-test-report.yml delete mode 100644 .github/workflows/rpm-build-and-test.yml delete mode 100644 .github/workflows/trivy.yml delete mode 100644 .github/workflows/unit-testing.yml delete mode 100644 .github/workflows/version-checks.yml diff --git a/.github/actions/import-commit-message/action.yml b/.github/actions/import-commit-message/action.yml new file mode 100644 index 00000000000..1bd05a1ef02 --- /dev/null +++ b/.github/actions/import-commit-message/action.yml @@ -0,0 +1,24 @@ +# it's a real shame that this step is even needed. push events have the commit message # in +# ${{ github.event.head_commit.message }} but pull_requests don't. :-( +name: 'Import Commit Message' +description: 'Import Commit Message' +runs: + using: "composite" + steps: + - name: Import Commit Message + id: commit_message + shell: bash + run: echo "text<> $GITHUB_OUTPUT; + git show -s --format=%B >> $GITHUB_OUTPUT; + echo "EOF" >> $GITHUB_OUTPUT; + - name: Import and Dequote Commit Message + id: dequoted_commit_message + shell: bash + run: . ci/gha_functions.sh; + echo "text<> $GITHUB_OUTPUT; + git show -s --format=%B | escape_single_quotes >> $GITHUB_OUTPUT; + echo "EOF" >> $GITHUB_OUTPUT; + - name: Identify Commit Pragmas + shell: bash + run: . 
ci/gha_functions.sh; + echo '${{ steps.dequoted_commit_message.outputs.text }}' | get_commit_pragmas diff --git a/.github/workflows/bash_unit_testing.yml b/.github/workflows/bash_unit_testing.yml deleted file mode 100644 index 0501258ffe7..00000000000 --- a/.github/workflows/bash_unit_testing.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: bash Unit Testing - -on: - pull_request: - -concurrency: - group: bash-unit-test-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -defaults: - run: - shell: bash --noprofile --norc -ueo pipefail {0} - -permissions: {} - -jobs: - Test-gha-functions: - name: Tests in ci/gha_functions.sh - if: github.repository == 'daos-stack/daos' - runs-on: [self-hosted, light] - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Checkout bash_unit project - uses: actions/checkout@v4 - with: - repository: 'pgrange/bash_unit' - path: bash_unit - - name: Unit testing with bash_unit - run: FORCE_COLOR=true ./bash_unit/bash_unit ci/gha_functions.sh diff --git a/.github/workflows/ci2.yml b/.github/workflows/ci2.yml deleted file mode 100644 index 3f9eb05a3b0..00000000000 --- a/.github/workflows/ci2.yml +++ /dev/null @@ -1,152 +0,0 @@ -name: Build - -on: - pull_request: - -concurrency: - group: ci2-${{ github.head_ref }} - cancel-in-progress: true - -permissions: {} - -jobs: - - # reuse the cache from the landing-builds workflow if available, if not then build the images - # from scratch, but do not save them. - Build-and-test: - name: Run DAOS/NLT tests - runs-on: ubuntu-22.04 - permissions: - # https://github.com/EnricoMi/publish-unit-test-result-action#permissions - checks: write - pull-requests: write - strategy: - matrix: - distro: [ubuntu] - include: - - distro: ubuntu - base: ubuntu - with: ubuntu:mantic - env: - DEPS_JOBS: 10 - COMPILER: clang - BASE_DISTRO: ${{ matrix.with }} - DOCKER_BASE: ${{ matrix.base }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 500 - ref: ${{ github.event.pull_request.head.sha }} - - name: Setup git hash - run: ./ci/gha_helper.py - id: commit-hash - - name: Fetch docker images from cache. - uses: jpribyl/action-docker-layer-caching@v0.1.1 - continue-on-error: true - with: - key: ${{ steps.commit-hash.outputs.key }} - restore-keys: | - ${{ steps.commit-hash.outputs.restore }} - ${{ steps.commit-hash.outputs.restore_prev }} - skip-save: true - - name: Update dependencies in image. - run: docker build . 
--file utils/docker/Dockerfile.${{ matrix.distro }} - --build-arg DAOS_BUILD=no - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_KEEP_SRC=yes - --tag build-image - - name: Build and Test - run: docker run --name build-post --mount type=tmpfs,destination=/mnt/daos_0,tmpfs-mode=1777 - --env COMPILER --env DEPS_JOBS --user root:root build-image - ./daos/utils/ci/run_in_gha.sh - - name: Fetch results - if: always() - run: docker cp build-post:/home/daos/daos/nlt-junit.xml ./ - - name: Publish NLT test results - if: always() - uses: EnricoMi/publish-unit-test-result-action@v1.17 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - files: nlt-junit.xml - comment_mode: off # yamllint disable-line rule:truthy - fail_on: nothing - - Build: - name: Build DAOS - runs-on: ubuntu-22.04 - strategy: - fail-fast: false - matrix: - distro: [rocky, fedora, leap.15] - compiler: [clang, gcc] - include: - - distro: rocky - base: el.9 - with: rockylinux/rockylinux:9 - - distro: fedora - base: el.8 - with: fedora:38 - - distro: leap.15 - base: leap.15 - with: opensuse/leap:15.5 - env: - DEPS_JOBS: 10 - BASE_DISTRO: ${{ matrix.with }} - DOCKER_BASE: ${{ matrix.base }} - COMPILER: ${{ matrix.compiler }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 500 - ref: ${{ github.event.pull_request.head.sha }} - - name: Setup git hash - run: ./ci/gha_helper.py - id: commit-hash - - name: Fetch docker images from cache. - uses: jpribyl/action-docker-layer-caching@v0.1.1 - continue-on-error: true - with: - key: ${{ steps.commit-hash.outputs.key }} - restore-keys: | - ${{ steps.commit-hash.outputs.restore }} - ${{ steps.commit-hash.outputs.restore_prev }} - skip-save: true - - name: Build in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg COMPILER - --build-arg DAOS_KEEP_SRC=yes - --tag build-image - - name: Build Java in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=yes - --build-arg COMPILER - - name: Build debug in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=debug - --build-arg COMPILER - - name: Build devel in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=dev - --build-arg COMPILER - -# Should work, but enable on master only for now. -# - name: Run NLT -# run: docker run --mount type=tmpfs,destination=/mnt/daos_0,tmpfs-mode=1777 --user root:root -# --name build-post build-image -# ./daos/utils/node_local_test.py --no-root --memcheck no --test cont_copy diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml deleted file mode 100644 index 6e174f0b74a..00000000000 --- a/.github/workflows/create_release.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Create Release -# This workflow is triggered on pushes to the master branch of the repository. 
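The Build-and-test job in the deleted ci2.yml above drives NLT entirely through Docker: it builds a build-image, runs run_in_gha.sh inside a container named build-post, and copies the junit file out under "if: always()" so results survive a test failure. Condensed into a hand-runnable sketch (build arguments trimmed, image and container names as in the workflow):

    # Build the image, run the NLT script in a throw-away container, then always fetch results.
    docker build . --file utils/docker/Dockerfile.ubuntu --build-arg DAOS_BUILD=no --tag build-image
    docker run --name build-post --mount type=tmpfs,destination=/mnt/daos_0,tmpfs-mode=1777 \
           --env COMPILER --env DEPS_JOBS --user root:root build-image \
           ./daos/utils/ci/run_in_gha.sh || true        # do not stop here; results are copied either way
    docker cp build-post:/home/daos/daos/nlt-junit.xml ./   # the workflow's "Fetch results" step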
-on: - push: - paths: - - TAG - branches: - - master - - 'release/**' - -permissions: {} - -jobs: - make_release: - name: Create Release - if: github.repository == 'daos-stack/daos' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 2 - - uses: ./.github/actions/make_release - id: make_release - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/gcp-builds.yml b/.github/workflows/gcp-builds.yml index e0b9c5acc70..3f4623a600c 100644 --- a/.github/workflows/gcp-builds.yml +++ b/.github/workflows/gcp-builds.yml @@ -127,7 +127,20 @@ jobs: env: DISTRO_NAME: DISTRO_VERSION: + LIBFABRIC_HASH: + MERCURY_HASH: + ISA-L_HASH: + ISA-L_CRYPTO_HASH: + ARGOBOTS_HASH: + DPDK_HASH: + SPDK_HASH: + RAFT_HASH: + PMDK_HASH: steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: Import commit pragmas uses: ./.github/actions/import-commit-pragmas - name: Set PR repo env vars @@ -175,10 +188,6 @@ jobs: echo "DISTRO_NAME=$DISTRO_NAME" >> $GITHUB_ENV echo "DISTRO_VERSION=$DISTRO_VERSION" >> $GITHUB_ENV echo "STAGE_NAME=Build RPM on $DISTRO_NAME $DISTRO_VERSION" >> $GITHUB_ENV - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - name: Remove Dockerignore file # necessary because our Dockerfile assumes /daos is our build context # but the .dockerignore file ignores many files we need in our RPM @@ -225,6 +234,10 @@ jobs: DISTRO_NAME: DISTRO_VERSION: steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: Import commit pragmas uses: ./.github/actions/import-commit-pragmas - name: Set variables @@ -246,10 +259,6 @@ jobs: echo "BASE_DOCKER_IMAGE=$BASE_DOCKER_IMAGE" >> $GITHUB_ENV echo "DAOS_DOCKER_IMAGE=$DAOS_DOCKER_IMAGE" >> $GITHUB_ENV echo "STAGE_NAME=Build Server Images" >> $GITHUB_ENV - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - name: Remove Dockerignore file # necessary because our Dockerfile assumes /daos is our build context # but the .dockerignore file ignores many files we need in our RPM diff --git a/.github/workflows/landing-builds.yml b/.github/workflows/landing-builds.yml deleted file mode 100644 index 814c4f0e71f..00000000000 --- a/.github/workflows/landing-builds.yml +++ /dev/null @@ -1,398 +0,0 @@ -name: Landings - -# Run on landings, or when this action itself is changed. -on: - push: - branches: - - master - - 'release/*' - pull_request: - paths: - - .github/workflows/landing-builds.yml - - 'utils/docker/Dockerfile.*' - - 'utils/scripts/install-*.sh' - - 'utils/scripts/helpers/*' - - utils/ci/** - - ci/** - - requirements-build.txt - - requirements-utest.txt - - utils/build.config - -permissions: {} - -jobs: - - # Build a base Docker image, and save it with a key based on the hash of the dependencies, and a - # copy of the git hash. Do not attempt to load the cache here, but rather start each build - # cleanly. As the file hash comes before the git hash then each master build gets to populate - # a new hash key, PRs then fetch based only on the file hash so they get the most recent master - # build to complete. - # This workflow only runs on landings, so the cache will only be built/saved when this changes, - # builds for specific PRs will attempt to read this cache only, but not save anything. - # For that reason set concurrency to 1 here for maximum reliability. 
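ci/gha_helper.py, which produces the key, restore and restore_prev outputs consumed by the caching steps here and in ci2.yml, is not part of this diff. A minimal sketch of the two-part keying scheme the comment above describes, assuming the hashed inputs are the Docker and CI files listed in this workflow's paths filter:

    # File-content hash first, git hash second: landing builds save a fresh key per commit,
    # while PR builds restore on the file-hash prefix and pick up the newest master cache.
    FILES_HASH=$(cat utils/docker/Dockerfile.* utils/scripts/install-*.sh \
                 requirements-build.txt utils/build.config | sha256sum | cut -c1-16)
    GIT_HASH=$(git rev-parse --short HEAD)
    echo "key=${FILES_HASH}-${GIT_HASH}" >> "$GITHUB_OUTPUT"
    echo "restore=${FILES_HASH}-" >> "$GITHUB_OUTPUT"

The real helper also emits a restore_prev fallback key, which this sketch omits.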
- # Set max-parallel and cache concurrency to 1 to avoid https 429 errors on cache save. - # Distros where we build on PR should be covered here as the cache is populated for PRs to read, - # for distros where we only want to build on master and not PRs see the Build-branch job below. - Prepare: - name: Create Docker images - runs-on: ubuntu-22.04 - strategy: - fail-fast: false - max-parallel: 1 - matrix: - distro: [ubuntu, rocky, fedora, leap.15] - include: - - distro: ubuntu - base: ubuntu - with: ubuntu:mantic - - distro: rocky - base: el.9 - with: rockylinux/rockylinux:9 - - distro: fedora - base: el.8 - with: fedora:38 - - distro: leap.15 - base: leap.15 - with: opensuse/leap:15.5 - env: - DEPS_JOBS: 10 - BASE_DISTRO: ${{ matrix.with }} - DOCKER_BASE: ${{ matrix.base }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 500 - - name: Setup git hash - run: ./ci/gha_helper.py --single - id: commit-hash - - name: Setup docker cache - uses: jpribyl/action-docker-layer-caching@v0.1.1 - with: - key: ${{ steps.commit-hash.outputs.key }} - restore-keys: ${{ steps.commit-hash.outputs.restore }} - concurrency: 1 - - name: Prepare base image in Docker - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DAOS_DEPS_BUILD=no - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - - name: Build dependencies in Docker - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DAOS_BUILD=no - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - - name: Prune images not required for build. - run: docker images --all --filter label=DAOS=true --quiet | xargs docker rmi --no-prune - - Build-and-test: - name: Run DAOS/NLT tests - needs: Prepare - runs-on: ubuntu-22.04 - permissions: - # https://github.com/EnricoMi/publish-unit-test-result-action#permissions - checks: write - pull-requests: write - strategy: - matrix: - distro: [ubuntu] - include: - - distro: ubuntu - base: ubuntu - with: ubuntu:mantic - env: - DEPS_JOBS: 10 - BASE_DISTRO: ${{ matrix.with }} - DOCKER_BASE: ${{ matrix.base }} - COMPILER: clang - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 500 - - name: Setup git hash - run: ./ci/gha_helper.py --single - id: commit-hash - - uses: jpribyl/action-docker-layer-caching@v0.1.1 - continue-on-error: true - with: - key: ${{ steps.commit-hash.outputs.key }} - restore-keys: | - ${{ steps.commit-hash.outputs.restore }} - ${{ steps.commit-hash.outputs.restore_prev }} - skip-save: true - - name: Update dependencies in image. - run: docker build . 
--file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DAOS_BUILD=no - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_KEEP_SRC=yes - --tag build-image - - name: Build and Test - run: docker run --name build-post --mount type=tmpfs,destination=/mnt/daos_0,tmpfs-mode=1777 - --env COMPILER --env DEPS_JOBS --user root:root build-image - ./daos/utils/ci/run_in_gha.sh - - name: Fetch results - if: always() - run: docker cp build-post:/home/daos/daos/nlt-junit.xml ./ - - name: Publish NLT test results - if: always() - uses: EnricoMi/publish-unit-test-result-action@v1.17 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - files: nlt-junit.xml - comment_mode: off # yamllint disable-line rule:truthy - fail_on: nothing - - Build: - name: Build DAOS in image - needs: Prepare - runs-on: ubuntu-22.04 - strategy: - fail-fast: false - max-parallel: 2 - matrix: - distro: [ubuntu, rocky, fedora, leap.15] - compiler: [clang, gcc] - include: - - distro: ubuntu - base: ubuntu - with: ubuntu:mantic - - distro: rocky - base: el.9 - with: rockylinux/rockylinux:9 - - distro: fedora - base: el.8 - with: fedora:38 - - distro: leap.15 - base: leap.15 - with: opensuse/leap:15.5 - env: - DEPS_JOBS: 10 - BASE_DISTRO: ${{ matrix.with }} - DOCKER_BASE: ${{ matrix.base }} - COMPILER: ${{ matrix.compiler }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 500 - - name: Setup git hash - run: ./ci/gha_helper.py --single - id: commit-hash - - uses: jpribyl/action-docker-layer-caching@v0.1.1 - continue-on-error: true - with: - key: ${{ steps.commit-hash.outputs.key }} - restore-keys: | - ${{ steps.commit-hash.outputs.restore }} - ${{ steps.commit-hash.outputs.restore_prev }} - skip-save: true - - name: Build in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg COMPILER - --build-arg DAOS_KEEP_SRC=yes - --tag build-image - - name: Build Java in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=yes - --build-arg COMPILER - - name: Build debug in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=debug - --build-arg COMPILER - - name: Build devel in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=dev - --build-arg COMPILER - # Fails with Ubuntu still for the spdk issue. - # - name: Run NLT - # run: docker run --mount type=tmpfs,destination=/mnt/daos_0,tmpfs-mode=1777 - # --user root:root build-image ./daos/utils/node_local_test.py --no-root - # --memcheck no --test cont_copy - - Build-branch: - name: Build DAOS - runs-on: ubuntu-22.04 - strategy: - fail-fast: false - matrix: - distro: [alma.8, alma.9, rocky.8, ubuntu] - include: - - distro: alma.8 - base: el.8 - with: almalinux:8 - - distro: alma.9 - base: el.9 - with: almalinux:9 - - distro: rocky.8 - base: el.8 - with: rockylinux/rockylinux:8 - - distro: ubuntu - base: ubuntu - with: ubuntu:22.04 - env: - DEPS_JOBS: 10 - BASE_DISTRO: ${{ matrix.with }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - - name: Build dependencies in image. 
- run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_BUILD=no - - name: Build in docker with clang - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg COMPILER=clang - --build-arg DAOS_KEEP_SRC=yes - --tag build-image - - name: Build Java in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=yes - --build-arg COMPILER=clang - - name: Build debug in docker with clang. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=debug - --build-arg COMPILER=clang - - name: Build devel in docker with clang - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=dev - --build-arg COMPILER=clang - - name: Build in docker with gcc - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg COMPILER=gcc - - name: Build debug in docker with gcc - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=debug - --build-arg COMPILER=gcc - - name: Build devel in docker with gcc - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=dev - --build-arg COMPILER=gcc - - name: Run NLT - run: docker run --mount type=tmpfs,destination=/mnt/daos_0,tmpfs-mode=1777 --user root:root - build-image ./daos/utils/node_local_test.py --no-root - --memcheck no --test cont_copy --system-ram-reserved 1 - - Build-branch-ARM: - name: Build DAOS (ARM64) - if: github.repository == 'daos-stack/daos' - runs-on: [self-hosted, ARM64] - strategy: - fail-fast: false - matrix: - distro: [ubuntu, alma.8, leap.15] - compiler: [clang] - include: - - distro: ubuntu - base: ubuntu - with: ubuntu:mantic - - distro: alma.8 - base: el.8 - with: almalinux:8 - - distro: leap.15 - base: leap.15 - with: opensuse/leap:15.5 - env: - DEPS_JOBS: 10 - BASE_DISTRO: ${{ matrix.with }} - DOCKER_BASE: ${{ matrix.base }} - COMPILER: ${{ matrix.compiler }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - - name: Build dependencies in image. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_BUILD=no - - name: Build in docker with clang - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg COMPILER=clang - --build-arg DAOS_KEEP_SRC=yes - --tag build-image - - name: Build Java in docker. - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=yes - --build-arg COMPILER=clang - - name: Build debug in docker with clang. - run: docker build . 
--file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=debug - --build-arg COMPILER=clang - - name: Build devel in docker with clang - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=dev - --build-arg COMPILER=clang - - name: Build in docker with gcc - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg COMPILER=gcc - - name: Build debug in docker with gcc - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=debug - --build-arg COMPILER=gcc - - name: Build devel in docker with gcc - run: docker build . --file utils/docker/Dockerfile.${{ matrix.base }} - --build-arg DEPS_JOBS - --build-arg BASE_DISTRO - --build-arg DAOS_JAVA_BUILD=no - --build-arg DAOS_BUILD_TYPE=dev - --build-arg COMPILER=gcc - - name: Run NLT - run: docker run --mount type=tmpfs,destination=/mnt/daos_0,tmpfs-mode=1777 --user root:root - build-image ./daos/utils/node_local_test.py --no-root - --memcheck no --test cont_copy --system-ram-reserved 6 diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml deleted file mode 100644 index 6d611b21351..00000000000 --- a/.github/workflows/linting.yml +++ /dev/null @@ -1,223 +0,0 @@ -name: Linting - -# Always run on Pull Requests as then these checks can be marked as required. -on: - push: - branches: - - master - - 'feature/*' - - 'release/*' - pull_request: - -permissions: {} - -jobs: - # Run isort on the tree. - # This checks .py files only so misses SConstruct and SConscript files are not checked, rather - # for these files check them afterwards. The output-filter will not be installed for this part - # so regressions will be detected but not annotated. - isort: - name: Python isort - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 - with: - python-version: '3' - - uses: isort/isort-action@f14e57e1d457956c45a19c05a89cccdf087846e5 # v1.1.0 - with: - requirementsFiles: "requirements.txt" - - name: Run on SConstruct file. - run: isort --check-only SConstruct - - name: Run on build files. - run: find . -name SConscript | xargs isort --check-only - - shell-check: - name: ShellCheck - runs-on: ubuntu-22.04 - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Run - run: sudo apt-get update && sudo apt-get install shellcheck - - name: Add error parser - run: echo -n "::add-matcher::ci/shellcheck-matcher.json" - - name: Run Shellcheck - # The check will run with this file from the target branch but the code from the PR so - # test for this file before calling it to prevent failures on PRs where this check is - # in the target branch but the PR is not updated to include it. - run: \[ ! 
-x ci/run_shellcheck.sh \] || ./ci/run_shellcheck.sh - - log-check: - name: Logging macro checking - runs-on: ubuntu-22.04 - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Check DAOS logging macro use. - run: ./utils/cq/d_logging_check.py --github src - - ftest-tags: - name: Ftest tag check - runs-on: ubuntu-22.04 - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Check DAOS ftest tags. - run: \[ ! -x src/tests/ftest/tags.py \] || ./src/tests/ftest/tags.py lint --verbose - - flake8-lint: - runs-on: ubuntu-22.04 - name: Flake8 check - steps: - - name: Check out source repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Set up Python environment - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 - with: - python-version: '3' - - name: Add parser - run: echo -n "::add-matcher::ci/daos-flake-matcher.json" - - name: Add whitespace parser - run: echo -n "::add-matcher::ci/daos-flakew-matcher.json" - - name: Add error parser - run: echo -n "::add-matcher::ci/daos-flakee-matcher.json" - - name: flake8 Lint - uses: py-actions/flake8@84ec6726560b6d5bd68f2a5bed83d62b52bb50ba # v2.3.0 - with: - # W503 and W504 are related as they conflict. W503 is the preferred style and all code - # should be using it now. - ignore: 'W503' - exclude: 'src/control/vendor,src/client/pydaos/raw' - max-line-length: '100' - - name: flake8 Lint on SCons files. - uses: py-actions/flake8@84ec6726560b6d5bd68f2a5bed83d62b52bb50ba # v2.3.0 - with: - ignore: 'F821,W503,F841' - max-line-length: '100' - args: '--filename */SConscript, SConstruct' - - doxygen: - name: Doxygen - runs-on: ubuntu-22.04 - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Install doxygen - run: sudo apt-get install doxygen - - name: Add parser - run: echo -n "::add-matcher::ci/daos-doxygen-matcher.json" - - name: Run check - run: doxygen Doxyfile - - name: 'Upload Artifact' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - with: - name: API Documentation - path: docs/doxygen/html/ - retention-days: 1 - - pylint: - name: Pylint check - runs-on: ubuntu-22.04 - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 - with: - python-version: '3.11' - - name: Install python packages - run: python3 -m pip install --requirement requirements.txt - - name: Install enchant - run: sudo apt-get update && sudo apt-get -y install python3-enchant - - name: Show versions - run: ./utils/cq/daos_pylint.py --version - - name: Run pylint check. 
- run: ./utils/cq/daos_pylint.py --git --output-format github - - codespell: - name: Codespell - runs-on: ubuntu-22.04 - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Install extra python packages - run: python3 -m pip install --requirement utils/cq/requirements.txt - - name: Run check - uses: codespell-project/actions-codespell@3174815d6231f5bdc24dbfb6fc3b8caec73d521c # master - with: - skip: ./src/control/vendor,./src/control/go.sum,./.git - ignore_words_file: ci/codespell.ignores - builtin: clear,rare,informal,names,en-GB_to_en-US - - clang-format: - name: Clang Format - runs-on: ubuntu-22.04 - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Pull via git - run: git fetch origin ${{ github.event.pull_request.base.ref }} - - name: Run check in docker - uses: ./.github/actions/clang-format - with: - target: origin/${{ github.event.pull_request.base.ref }} - - name: Export changes - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - if: failure() - with: - name: format-patch-for-pr-${{ github.event.pull_request.number }} - path: auto-format-changes.diff - - yaml-lint: - name: Yamllint check - runs-on: ubuntu-22.04 - steps: - - name: Check out source repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Set up Python environment - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 - with: - python-version: '3' - - name: Install extra python packages - run: python3 -m pip install --requirement utils/cq/requirements.txt - - name: Run check - run: yamllint --format github . - - linting-summary: - name: Linting Summary - runs-on: ubuntu-22.04 - needs: - - isort - - shell-check - - log-check - - ftest-tags - - flake8-lint - - doxygen - - pylint - - codespell - # - clang-format # not required - - yaml-lint - if: (!cancelled()) - steps: - - name: Check if any job failed - run: | - if [[ -z "$(echo "${{ join(needs.*.result, '') }}" | sed -e 's/success//g')" ]]; then - echo "All jobs succeeded" - else - echo "One or more jobs did not succeed" - exit 1 - fi diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml deleted file mode 100644 index ead66309fba..00000000000 --- a/.github/workflows/ossf-scorecard.yml +++ /dev/null @@ -1,76 +0,0 @@ -# This workflow uses actions that are not certified by GitHub. They are provided -# by a third-party and are governed by separate terms of service, privacy -# policy, and support documentation. - -name: Scorecard supply-chain security -on: - # For Branch-Protection check. Only the default branch is supported. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection - branch_protection_rule: - # To guarantee Maintained check is occasionally updated. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained - schedule: - - cron: '45 8 * * 0' - push: - branches: ["master"] - pull_request: - -# Declare default permissions as nothing. -permissions: {} - -jobs: - analysis: - name: Scorecard analysis - runs-on: ubuntu-latest - permissions: - # Needed to upload the results to code-scanning dashboard. - security-events: write - # Needed to publish results and get a badge (see publish_results below). - id-token: write - # Uncomment the permissions below if installing in a private repository. 
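The linting-summary job in the deleted linting.yml above gates on every lint job with a small string trick: GitHub's join() concatenates the result of each needed job, and stripping the word "success" leaves text behind only if some job failed, was cancelled, or was skipped. A standalone illustration with a stand-in for the join() expression:

    results="successsuccessfailure"      # stand-in for ${{ join(needs.*.result, '') }}
    if [[ -z "$(echo "$results" | sed -e 's/success//g')" ]]; then
      echo "All jobs succeeded"
    else
      echo "One or more jobs did not succeed"   # "failure" survives the sed, so the gate fails
      exit 1
    fi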
- # contents: read - # actions: read - - steps: - - name: "Checkout code" - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - persist-credentials: false - - - name: "Run analysis" - uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 - with: - results_file: results.sarif - results_format: sarif - # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: - # - you want to enable the Branch-Protection check on a *public* repository, or - # - you are installing Scorecard on a *private* repository - # To create the PAT, follow the steps in - # https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional. - # repo_token: ${{ secrets.SCORECARD_TOKEN }} - - # Public repositories: - # - Publish results to OpenSSF REST API for easy access by consumers - # - Allows the repository to include the Scorecard badge. - # - See https://github.com/ossf/scorecard-action#publishing-results. - # For private repositories: - # - `publish_results` will always be set to `false`, regardless - # of the value entered here. - publish_results: true - - # Upload the results as artifacts (optional). Commenting out will disable - # uploads of run results in SARIF - # format to the repository Actions tab. - - name: "Upload artifact" - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - with: - name: SARIF file - path: results.sarif - retention-days: 5 - - # Upload the results to GitHub's code scanning dashboard (optional). - # Commenting out will disable upload of results to your repo's Code Scanning dashboard - - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@1b1aada464948af03b950897e5eb522f92603cc2 # v3.24.9 - with: - sarif_file: results.sarif diff --git a/.github/workflows/pr-metadata.yml b/.github/workflows/pr-metadata.yml deleted file mode 100644 index a4d08418ccc..00000000000 --- a/.github/workflows/pr-metadata.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Jira Report - -on: - # Having this be pull_request_target rather than pull_request means it runs in the context of the - # target branch rather than the PR, which in turn means the checkout is of the target. - # Trigger for the defaults plus "edited" so that it re-runs if a PR title is modified as that's - # what it's checking. This probably doesn't need to run on synchronize however if a PR is being - # worked then re-checking the Jira metadata is no bad thing. 
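ci/jira_query.py, which the workflow below runs against the PR number, is not included in this patch; the comment above only establishes that the PR title is what gets checked. Purely as an illustration of that kind of check (the ticket pattern, sample title, and label value are assumptions, not taken from the script; the message and label output names are the ones later steps read):

    title="DAOS-1234 dfs: fix a handle leak"                  # hypothetical PR title
    if [[ "$title" =~ ^([A-Z]+-[0-9]+) ]]; then
      echo "message=Matched ticket ${BASH_REMATCH[1]} in the PR title" >> "$GITHUB_OUTPUT"
    else
      echo "label=no-ticket" >> "$GITHUB_OUTPUT"              # hypothetical label value
    fi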
- pull_request_target: - types: [opened, synchronize, reopened, edited] - -permissions: {} - -jobs: - example_comment_pr: - runs-on: ubuntu-22.04 - permissions: - pull-requests: write - name: Report Jira data to PR comment - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: install jira - run: python3 -m pip install jira - - name: Load jira metadata - run: ./ci/jira_query.py ${{ github.event.pull_request.number }} - id: jira-data - - name: Comment on PR - if: always() - uses: thollander/actions-comment-pull-request@v2 - with: - comment_tag: 'jira_query_message' - message: ${{ steps.jira-data.outputs.message }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Set labels - if: ${{ always() && steps.jira-data.outputs.label != '' }} - uses: actions-ecosystem/action-add-labels@v1 - with: - labels: ${{ steps.jira-data.outputs.label }} - - name: Clear labels - if: ${{ always() && steps.jira-data.outputs.label-clear != '' }} - uses: actions-ecosystem/action-remove-labels@v1 - with: - labels: ${{ steps.jira-data.outputs.label-clear }} diff --git a/.github/workflows/rpm-build-and-test-report.yml b/.github/workflows/rpm-build-and-test-report.yml deleted file mode 100644 index d268550d2dc..00000000000 --- a/.github/workflows/rpm-build-and-test-report.yml +++ /dev/null @@ -1,120 +0,0 @@ -name: 'RPM Build and Test Report' - -on: - workflow_run: - workflows: ['RPM Build and Test'] - types: - - completed - # for testing before landing - workflow_dispatch: - -permissions: {} - -jobs: - report-vm-1: - runs-on: [self-hosted, docker] - # https://github.com/dorny/test-reporter/issues/149 - permissions: - checks: write - strategy: - matrix: - # TODO: figure out how to determine this matrix - distro: ['el8', 'el9', 'leap15'] - env: - STAGE_NAME: - steps: - - name: Set variables - run: | - case ${{ matrix.distro }} in - 'el8') - DISTRO_NAME="EL" - DISTRO_VERSION="8" - ;; - 'el9') - DISTRO_NAME="EL" - DISTRO_VERSION="9" - ;; - 'leap15') - DISTRO_NAME="Leap" - DISTRO_VERSION="15.4" - ;; - esac - echo "STAGE_NAME=Build RPM on $DISTRO_NAME $DISTRO_VERSION" >> $GITHUB_ENV - - name: Test Report - uses: phoenix-actions/test-reporting@v10 - with: - artifact: ${{ env.STAGE_NAME }} test-results - name: ${{ env.STAGE_NAME }} Test Results (phoenix-actions) - path: ${{ env.STAGE_NAME }}/**/results.xml - reporter: java-junit # Format of test results - - report-hw-1: - runs-on: [self-hosted, docker] - strategy: - matrix: - # TODO: figure out how to determine this matrix - stage: ['Large', 'Medium', 'Medium UCX Provider'] - env: - STAGE_NAME: - steps: - - name: Set variables - run: echo "STAGE_NAME=Functional Hardware ${{ matrix.stage }}" >> $GITHUB_ENV - - name: Test Report - uses: phoenix-actions/test-reporting@v10 - with: - artifact: ${{ env.STAGE_NAME }} test-results - name: ${{ env.STAGE_NAME }} Test Results (phoenix-actions) - path: ${{ env.STAGE_NAME }}/**/results.xml - reporter: java-junit # Format of test results - - report2: - runs-on: [self-hosted, docker] - strategy: - matrix: - distro: ['el8', 'el9', 'leap15'] - env: - STAGE_NAME: - steps: - - name: Set variables - run: | - case ${{ matrix.distro }} in - 'el8') - DISTRO_NAME="EL" - DISTRO_VERSION="8" - ;; - 'el9') - DISTRO_NAME="EL" - DISTRO_VERSION="9" - ;; - 'leap15') - DISTRO_NAME="Leap" - DISTRO_VERSION="15.4" - ;; - esac - echo "STAGE_NAME=Build RPM on $DISTRO_NAME $DISTRO_VERSION" >> $GITHUB_ENV - - name: Test Report - uses: dorny/test-reporter@v1 - with: - artifact: ${{ env.STAGE_NAME }} test-results - name: ${{ env.STAGE_NAME }} Test 
Results (dorny) - path: ${{ env.STAGE_NAME }}/**/results.xml - reporter: jest-junit # Format of test results - - report-hw-2: - runs-on: [self-hosted, docker] - strategy: - matrix: - # TODO: figure out how to determine this matrix - stage: ['Large', 'Medium', 'Medium UCX Provider'] - env: - STAGE_NAME: - steps: - - name: Set variables - run: echo "STAGE_NAME=Functional Hardware ${{ matrix.stage }}" >> $GITHUB_ENV - - name: Test Report - uses: dorny/test-reporter@v1 - with: - artifact: ${{ env.STAGE_NAME }} test-results - name: ${{ env.STAGE_NAME }} Test Results (dorny) - path: ${{ env.STAGE_NAME }}/**/results.xml - reporter: java-junit # Format of test results diff --git a/.github/workflows/rpm-build-and-test.yml b/.github/workflows/rpm-build-and-test.yml deleted file mode 100644 index a770293959f..00000000000 --- a/.github/workflows/rpm-build-and-test.yml +++ /dev/null @@ -1,789 +0,0 @@ -name: RPM Build and Test - -env: - # TODO: we really need to define a list of supported versions (ideally it's no more than 2) - # build is done on the lowest version and test on the highest with a "sanity test" - # stage done on all versions in the list ecept the highest - EL8_BUILD_VERSION: 8.6 - EL8_VERSION: 8.8 - EL9_BUILD_VERSION: 9 - EL9_VERSION: 9 - LEAP15_VERSION: 15.5 - -on: - workflow_dispatch: - inputs: - pr-repos: - description: 'Any PR-repos that you want included in this build' - required: false - pull_request: - -concurrency: - group: rpm-build-and-test-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -defaults: - run: - shell: bash --noprofile --norc -ueo pipefail {0} - -permissions: {} - -jobs: - # it's a real shame that this step is even needed. push events have the commit message # in - # ${{ github.event.head_commit.message }} but pull_requests don't. :-( - Import-commit-message: - name: Get commit message - if: github.repository == 'daos-stack/daos' - runs-on: [self-hosted, light] - # Map a step output to a job output - outputs: - message: ${{ steps.commit_message.outputs.text }} - dequoted_message: ${{ steps.dequoted_commit_message.outputs.text }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Import Commit Message - id: commit_message - run: echo "text<> $GITHUB_OUTPUT; - git show -s --format=%B >> $GITHUB_OUTPUT; - echo "EOF" >> $GITHUB_OUTPUT; - - name: Import and Dequote Commit Message - id: dequoted_commit_message - run: . ci/gha_functions.sh; - echo "text<> $GITHUB_OUTPUT; - git show -s --format=%B | escape_single_quotes >> $GITHUB_OUTPUT; - echo "EOF" >> $GITHUB_OUTPUT; - - name: Identify Commit Pragmas - run: . 
ci/gha_functions.sh; - echo '${{steps.dequoted_commit_message.outputs.text }}' | get_commit_pragmas - - Import-commit-pragmas: - name: Make commit pragma variables - runs-on: [self-hosted, light] - needs: [Import-commit-message] - # can't use matrixes for matrixed output yet - # https://github.com/actions/runner/pull/2477 - # strategy: - # matrix: - # distro: [el8, el9, leap15] - # include: - # - distro: el8 - # UC_DISTRO: EL8 - # - distro: el9 - # UC_DISTRO: EL9 - # - distro: leap15 - # UC_DISTRO: LEAP15 - # Map a step output to a job output - outputs: - rpm-test-version: ${{ steps.rpm-test-version.outputs.value }} - pr-repos: ${{ steps.pr-repos.outputs.value }} - run-gha: ${{ steps.run-gha.outputs.value }} - steps: - - name: Set rpm-test-version variable - id: rpm-test-version - uses: ./.github/actions/variable-from-pragma - with: - commit_message: ${{ needs.Import-commit-message.outputs.dequoted_message }} - pragma: RPM_TEST_VERSION - - name: Set pr-repos variable - id: pr-repos - uses: ./.github/actions/variable-from-pragma - with: - commit_message: ${{ needs.Import-commit-message.outputs.dequoted_message }} - pragma: PR_REPOS - - name: Set run-gha variable - id: run-gha - uses: ./.github/actions/variable-from-pragma - with: - commit_message: ${{ needs.Import-commit-message.outputs.dequoted_message }} - pragma: RUN_GHA - default: false - - Create-symlinks: - # you might think this is an odd place to do this and it should be done as a result of the - # build and/or testing stages and ideally you'd be right. - # the problem with that is that there is no way to get the success/fail result of individual - # axes of matrix jobs so there is no way to query them at the end and see their composite - # results. - # instead, the final result of the Build-RPM job, for example is a last-one-complete wins. - # so for example, if the el9 axis fails quickly and then the el8 axis succeeds afterward the - # resulting job state is success. - # instead we assume success at the beginning and then let any axis that fails remove the - # lastSuccessfulBuild link if it fails - name: Create lastBuild and lastSuccessfulBuild symlinks - runs-on: [self-hosted, light] - needs: [Import-commit-pragmas] - if: needs.Import-commit-pragmas.outputs.run-gha == 'true' && - needs.Import-commit-pragmas.outputs.rpm-test-version == '' && - !contains(needs.Import-commit-pragmas.outputs.pr-repos, 'daos@') - env: - # TODO -- this should be on stable, backedup storage, not /scratch - # yamllint disable-line rule:line-length - REPO_PATH: /scratch/job_repos/daos-stack/job/daos/job/PR-${{ github.event.pull_request.number }}/ - steps: - - name: Create lastBuild and lastSuccessfulBuild symlinks - run: . ci/gha_functions.sh; - mkdir -p ${REPO_PATH}; - rm -f ${REPO_PATH}last{,Successful}Build; - ln -s ${{ github.run_number }} ${REPO_PATH}lastBuild; - ln -s ${{ github.run_number }} ${REPO_PATH}lastSuccessfulBuild - - Calc-rpm-build-matrix: - name: Calculate RPM Build Matrix - runs-on: [self-hosted, wolf] - needs: [Import-commit-pragmas, Create-symlinks] - outputs: - matrix: ${{ steps.matrix.outputs.text }} - steps: - - name: Import commit pragmas - uses: ./.github/actions/import-commit-pragmas - - name: Calculate RPM Build Matrix - id: matrix - run: | # do not use the non-| format for this script - l=() - trap 'echo "text=[$(IFS=","; echo "${l[*]}")]" >> $GITHUB_OUTPUT' EXIT - if ${CP_SKIP_BUILD:-false}; then - exit 0 - fi - if ! ${CP_SKIP_BUILD_EL8_RPM:-false}; then - l+=('"el8"') - fi - if ! 
${CP_SKIP_BUILD_EL9_RPM:-false}; then - l+=('"el9"') - fi - if ${{ github.event_name == 'push' }} || - (${{ github.event_name == 'pull_request' }} && - ! ${CP_SKIP_BUILD_LEAP15_RPM:-false}); then - l+=('"leap15"') - fi - - Build-RPM: - name: Build RPM - permissions: - statuses: write - runs-on: [self-hosted, docker] - needs: [Create-symlinks, Import-commit-pragmas, Calc-rpm-build-matrix] - if: needs.Import-commit-pragmas.outputs.run-gha == 'true' && - needs.Create-symlinks.result == 'success' && - ((!cancelled()) || success() || failure()) - strategy: - matrix: - distro: ${{ fromJSON(needs.Calc-rpm-build-matrix.outputs.matrix) }} - fail-fast: false - env: - ARTIFACTORY_URL: https://artifactory.dc.hpdd.intel.com/ - DAOS_EMAIL: brian.murrell@intel.com - DAOS_FULLNAME: daos-stack - DISTRO: ${{ matrix.distro }} - DISTRO_REPOS: disabled - DOCKER_BUILDKIT: 0 - JENKINS_URL: https://build.hpdd.intel.com/ - ARTIFACTS_URL: file:///scratch/job_repos/ - MOCK_OPTIONS: --uniqueext=${{ github.run_id }} - PR_NUM: ${{ github.event.pull_request.number }} - # TODO -- this should be on stable, backedup storage, not /scratch - # yamllint disable-line rule:line-length - REPO_PATH: /scratch/job_repos/daos-stack/job/daos/job/PR-${{ github.event.pull_request.number }}/ - REPO_FILE_URL: https://artifactory.dc.hpdd.intel.com/artifactory/repo-files/ - RUN_ID: ${{ github.run_id }} - TARGET: ${{ matrix.distro }} - # keep VS Code's GHA linting happy - STAGE_NAME: - DISTRO_NAME: - DISTRO_VERSION: - CP_LEAP15_VERSION: - COMMIT_STATUS_DISTRO_VERSION: - FVERSION: - steps: - - name: Import commit pragmas - uses: ./.github/actions/import-commit-pragmas - - name: Set variables - run: | - FVERSION="38" - case ${{ matrix.distro }} in - 'el8') - CHROOT_NAME="rocky+epel-8-x86_64" - DISTRO_NAME="EL" - DISTRO_VERSION="${{ env.EL8_BUILD_VERSION }}" - COMMIT_STATUS_DISTRO_VERSION="8" - ;; - 'el9') - CHROOT_NAME="rocky+epel-9-x86_64" - DISTRO_NAME="EL" - DISTRO_VERSION="${{ env.EL9_BUILD_VERSION }}" - ;; - 'leap15') - CHROOT_NAME="opensuse-leap-${{ env.CP_LEAP15_VERSION && - env.CP_LEAP15_VERSION || - env.LEAP15_VERSION }}-x86_64" - DISTRO_NAME="Leap" - DISTRO_VERSION="${{ env.CP_LEAP15_VERSION && - env.CP_LEAP15_VERSION || env.LEAP15_VERSION }}" - ;; - esac - echo "CHROOT_NAME=$CHROOT_NAME" >> $GITHUB_ENV - echo "DISTRO_NAME=$DISTRO_NAME" >> $GITHUB_ENV - echo "DISTRO_VERSION=$DISTRO_VERSION" >> $GITHUB_ENV - echo "BUILD_CHROOT=/var/lib/mock/$CHROOT_NAME-${{ github.run_id }}/" >> $GITHUB_ENV - echo "STAGE_NAME=Build RPM on $DISTRO_NAME $DISTRO_VERSION" >> $GITHUB_ENV - echo "FVERSION=$FVERSION" >> $GITHUB_ENV - echo "COMMIT_STATUS_DISTRO_VERSION=$COMMIT_STATUS_DISTRO_VERSION" >> $GITHUB_ENV - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Build RPM Docker image - id: build-rpm-docker-image - continue-on-error: true - run: docker build --file utils/rpms/packaging/Dockerfile.mockbuild - --build-arg CACHEBUST=$(date +%s%3N) - --build-arg CB0=$(date +%V) - --build-arg REPO_FILE_URL=$REPO_FILE_URL - --build-arg UID=$(id -u) - --build-arg FVERSION=${{ env.FVERSION }} - --tag mock-build - utils/rpms - - name: Build RPM - id: build-rpm - continue-on-error: true - # yamllint disable rule:line-length - run: rm -rf mock_result; - mkdir -p mock_result; - docker run --name mock-build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.distro }} - --user build - -v "$PWD":"$PWD" -w "$PWD" - -v "$PWD"/mock_result:/var/lib/mock/$CHROOT_NAME/result - --privileged=true - -e 
DAOS_FULLNAME="$DAOS_FULLNAME" - -e DAOS_EMAIL="$DAOS_EMAIL" - -e DISTRO_VERSION="$DISTRO_VERSION" - -e STAGE_NAME="$STAGE_NAME" - -e CHROOT_NAME="$CHROOT_NAME" - -e ARTIFACTORY_URL="$ARTIFACTORY_URL" - -e REPO_FILE_URL="$REPO_FILE_URL" - -e JENKINS_URL="$JENKINS_URL" - -e TARGET="$TARGET" - mock-build ci/rpm/build.sh - # yamllint enable rule:line-length - - name: Build RPM failure log - id: build-rpm-fail-log - continue-on-error: true - if: steps.build-rpm.outcome != 'success' - run: cat mock_result/root.log; - cat mock_result/build.log - - name: Save RPM build logs - continue-on-error: true - uses: actions/upload-artifact@v4 - with: - name: ${{ env.STAGE_NAME }} logs - path: | - mock_result/root.log - mock_result/build.log - - name: Create repo - id: create-repo - if: steps.build-rpm.outcome == 'success' - continue-on-error: true - run: CHROOT_NAME=$CHROOT_NAME ci/rpm/create_repo.sh - - name: Test repo - id: test-repo - if: steps.create-repo.outcome == 'success' - continue-on-error: true - run: . ci/gha_functions.sh; - dnf --disablerepo=\* --repofrompath - testrepo,file://${REPO_PATH}${{ github.run_number }}/artifact/artifacts/$TARGET - repoquery -a - - name: Remove lastSuccessfulBuild link and exit failure - if: steps.test-repo.outcome != 'success' - run: rm -f ${REPO_PATH}lastSuccessfulBuild; - exit 1 - - name: Publish RPMs - uses: actions/upload-artifact@v4 - with: - name: ${{ env.DISTRO_NAME }} ${{ env.DISTRO_VERSION }} RPM repository - path: ${{ env.REPO_PATH}}${{ github.run_number }}/artifact/artifacts/${{ env.TARGET }} - - name: Update commit status - uses: ouzi-dev/commit-status-updater@v2 - with: - # yamllint disable-line rule:line-length - name: 'build/Build RPM on ${{ env.DISTRO_NAME }} ${{ env.COMMIT_STATUS_DISTRO_VERSION && env.COMMIT_STATUS_DISTRO_VERSION || env.DISTRO_VERSION }}' - status: "${{ job.status }}" - - Calc-functional-matrix: - name: Calculate Functional Testing Matrix - runs-on: [self-hosted, wolf] - needs: [Import-commit-pragmas] - if: needs.Import-commit-pragmas.outputs.run-gha == 'true' && - ((!cancelled()) || success() || failure()) - outputs: - matrix: ${{ steps.matrix.outputs.text }} - steps: - - name: Import commit pragmas - uses: ./.github/actions/import-commit-pragmas - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Calculate Functional Testing Matrix - id: matrix - run: | # do not use the non-| format for this script - . ci/gha_functions.sh - set -eu - # it might seem tempting to factor in the result of the build for this - # distro here and not include a failed build in the test matrix but - # the problem with that is that if/when the user asks GHA to rebuild - # all failed jobs and a previously failed RPM job is successful, the - # test matrix won't include testing it since it was calculated and was - # successful on the previous run without the failed build stage in it - l=() - trap 'echo "text=[$(IFS=","; echo "${l[*]}")]" >> $GITHUB_OUTPUT' EXIT - if ${CP_SKIP_FUNC_TEST:-false}; then - exit 0 - fi - if ! cd src/tests/ftest; then - echo "src/tests/ftest doesn't exist." - echo "Could not determine if tests exist for this stage, assuming they do." - exit 0 - fi - - if ./launch.py --list "$(get_test_tags "-hw")"; then - if ! ${CP_SKIP_BUILD_EL8_RPM:-false} && - ! ${CP_SKIP_FUNC_TEST_EL8:-false}; then - # it would definitely be nicer to get these into the environment - # as unquoted strings so that we didn't have to double quote here - l+=('"el8"') - fi - if ! 
${CP_SKIP_BUILD_EL9_RPM:-false} && - ! ${CP_SKIP_FUNC_TEST_EL9:-false}; then - l+=('"el9"') - fi - if ${{ github.event_name == 'push' }} || - (${{ github.event_name == 'pull_request' }} && - ! ${CP_SKIP_BUILD_LEAP15_RPM:-false} && - ! ${CP_SKIP_FUNC_TEST_LEAP15:-true}); then - l+=('"leap15"') - fi - fi - - Functional: - name: Functional Testing - runs-on: [self-hosted, wolf] - permissions: - statuses: write - # https://github.com/EnricoMi/publish-unit-test-result-action#permissions - checks: write - pull-requests: write - timeout-minutes: 7200 - needs: [Build-RPM, Import-commit-message, Calc-functional-matrix, Import-commit-pragmas] - strategy: - matrix: - distro: ${{ fromJSON(needs.Calc-functional-matrix.outputs.matrix) }} - fail-fast: false - # https://github.com/actions/runner/issues/491#issuecomment-926924523 - if: | - needs.Import-commit-pragmas.outputs.run-gha == 'true' && - needs.Calc-functional-matrix.outputs.matrix != '[]' && - (!cancelled()) && - (needs.Build-RPM.result == 'success' || - needs.Build-RPM.result == 'skipped') - env: - CONFIG_POWER_ONLY: false - PRAGMA_SUFFIX: -vm - OPERATIONS_EMAIL: brian.murrell@intel.com - TEST_RPMS: true - COMMIT_MESSAGE: ${{ needs.Import-commit-message.outputs.message }} - JENKINS_URL: https://build.hpdd.intel.com/ - REPOSITORY_URL: https://repo.dc.hpdd.intel.com/ - REMOVE_EXISTING_RPMS: false - # TODO -- this should be on stable, backedup storage - ARTIFACTS_URL: file:///scratch/job_repos/ - REPO_FILE_URL: https://artifactory.dc.hpdd.intel.com/artifactory/repo-files/ - # keep VS Code's GHA linting happy - NODESTRING: - CP_PR_REPOS: - CP_FEATURES: - CP_TEST_TAG: - CP_EL8_VM9_LABEL: - CP_EL9_VM9_LABEL: - CP_LEAP15_VM9_LABEL: - CP_PRIORITY: - CP_EL8_VERSION: - CP_EL9_VERSION: - CP_LEAP15_VERSION: - DISTRO: - CLUSTER_REQUEST_reqid: - STAGE_NAME: - QUEUE_URL: - LABEL: - DISTRO_NAME: - DISTRO_VERSION: - COMMIT_STATUS_DISTRO_VERSION: - steps: - - name: Import commit pragmas - uses: ./.github/actions/import-commit-pragmas - - name: Set variables - run: | - set -eux - env - STAGE_TAGS="-hw" - FTEST_ARG="" - INST_RPMS="daos-client daos-tests daos-server daos-serialize daos-tests-internal" - case "${{ matrix.distro }}" in - 'el8') - CHROOT_NAME="rocky+epel-8-x86_64" - DISTRO_NAME="EL" - DISTRO_NAME_UPPER="EL" - DISTRO_NAME_LOWER="el" - DISTRO_VERSION="${{ env.CP_EL8_VERSION && - env.CP_EL8_VERSION || env.EL8_VERSION }}" - DISTRO_VERSION_MAJOR="8" - OPENMPI="openmpi" - LABEL="${{ env.CP_EL8_VM9_LABEL && - env.CP_EL8_VM9_LABEL || 'ci_vm9' }}" - ;; - 'el9') - CHROOT_NAME="rocky+epel-9-x86_64" - DISTRO_NAME="EL" - DISTRO_NAME_UPPER="EL" - DISTRO_NAME_LOWER="el" - DISTRO_VERSION="${{ env.CP_EL9_VERSION && - env.CP_EL9_VERSION || env.EL9_VERSION }}" - DISTRO_VERSION_MAJOR="9" - PROV_DISTRO_VERSION_MAJOR="8" - OPENMPI="openmpi" - LABEL="${{ env.CP_EL9_VM9_LABEL && - env.CP_EL9_VM9_LABEL || 'ci_vm9' }}" - ;; - 'leap15') - CHROOT_NAME="opensuse-leap-${{ env.CP_LEAP15_VERSION && - env.CP_LEAP15_VERSION || - env.LEAP15_VERSION }}-x86_64" - DISTRO_NAME="Leap" - DISTRO_NAME_UPPER="LEAP" - DISTRO_NAME_LOWER="leap" - DISTRO_VERSION="${{ env.CP_LEAP15_VERSION && - env.CP_LEAP15_VERSION || env.LEAP15_VERSION }}" - DISTRO_VERSION_MAJOR="15" - OPENMPI="openmpi3" - LABEL="${{ env.CP_LEAP15_VM9_LABEL && - env.CP_LEAP15_VM9_LABEL || 'ci_vm9' }}" - ;; - esac - echo "CHROOT_NAME=$CHROOT_NAME" >> $GITHUB_ENV - echo "DISTRO_NAME=$DISTRO_NAME" >> $GITHUB_ENV - echo "DISTRO_VERSION=$DISTRO_VERSION" >> $GITHUB_ENV - echo "DISTRO_WITH_VERSION=$DISTRO_NAME_LOWER$DISTRO_VERSION" >> 
$GITHUB_ENV - echo "BUILD_CHROOT=/var/lib/mock/$CHROOT_NAME-${{ github.run_id }}/" >> $GITHUB_ENV - echo "STAGE_NAME=Functional on $DISTRO_NAME $DISTRO_VERSION" >> $GITHUB_ENV - echo "STAGE_TAGS=$STAGE_TAGS" >> $GITHUB_ENV - echo "FTEST_ARG=$FTEST_ARG" >> $GITHUB_ENV - echo "DISTRO=${DISTRO_NAME_UPPER}_$DISTRO_VERSION_MAJOR" >> $GITHUB_ENV - echo -n "PROVISION_DISTRO=${DISTRO_NAME_UPPER}_" >> $GITHUB_ENV - echo "${PROV_DISTRO_VERSION_MAJOR:-$DISTRO_VERSION_MAJOR}" >> $GITHUB_ENV - echo -n "DAOS_STACK_${DISTRO_NAME_UPPER}_" >> $GITHUB_ENV - echo "${PROV_DISTRO_VERSION_MAJOR:-$DISTRO_VERSION_MAJOR}_LOCAL_REPO=not_used" >> \ - $GITHUB_ENV - echo "LABEL=$LABEL" >> $GITHUB_ENV - echo "INST_RPMS=$INST_RPMS" >> $GITHUB_ENV - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 500 - ref: ${{ github.event.pull_request.head.sha }} - - name: Request and Provision a Cluster - timeout-minutes: 7200 - uses: ./.github/actions/provision-cluster - with: - condition: env.CP_SKIP_FUNC_TEST-${{ env.DISTRO }} != 'true' && \ - env.CP_SKIP_FUNC_TEST != 'true' - - name: Run Test - timeout-minutes: 7200 - if: env.CP_SKIP_FUNC_TEST-${{ env.DISTRO }} != 'true' && env.CP_SKIP_FUNC_TEST != 'true' - id: run-test - run: | - . ci/gha_functions.sh - NODE_COUNT="$NODE_COUNT" \ - TEST_TAG="$(get_test_tags ${{ env.STAGE_TAGS}})" \ - FTEST_ARG="${{ env.FTEST_ARG }}" ci/functional/test_main.sh - - name: Cancel cluster request (if cancelled after requesting) - if: cancelled() - run: | - set -eux - . ci/gha_functions.sh - if ! JENKINS_URL="${{ env.JENKINS_URL }}" QUEUE_URL="${{ env.QUEUE_URL }}" \ - cancel_provision; then - # probably already provisioned and needs unprovisioning - if ! cleanup_provision_request "${{ env.CLUSTER_REQUEST_reqid }}"; then - exit 1 - fi - fi - - name: Job cleanup - if: (!cancelled() && (success() || failure())) - run: | - set -eux - . 
ci/gha_functions.sh - NODELIST=${{ env.NODESTRING }} ci/functional/job_cleanup.sh || true - cleanup_provision_request "${{ env.CLUSTER_REQUEST_reqid }}" - - name: Publish test results - if: (!cancelled()) && (success() || failure()) && - steps.run-test.outcome != 'skipped' - uses: EnricoMi/publish-unit-test-result-action@v2 - with: - check_name: ${{ env.STAGE_NAME }} Test Results (old) - github_token: ${{ secrets.GITHUB_TOKEN }} - junit_files: ${{ env.STAGE_NAME }}/**/results.xml - - name: Publish artifacts - if: (!cancelled()) && (success() || failure()) && - steps.run-test.outcome != 'skipped' - uses: actions/upload-artifact@v4 - with: - name: ${{ env.STAGE_NAME }} artifacts - path: ${{ env.STAGE_NAME }}/** - - name: Upload test results - if: (success() || failure()) && - steps.run-test.outcome != 'skipped' - uses: actions/upload-artifact@v4 - with: - name: ${{ env.STAGE_NAME }} test-results - path: ${{ env.STAGE_NAME }}/**/results.xml - - name: Update commit status - uses: ouzi-dev/commit-status-updater@v2 - with: - # yamllint disable-line rule:line-length - name: 'test/Functional on ${{ env.DISTRO_NAME }} ${{ env.COMMIT_STATUS_DISTRO_VERSION && env.COMMIT_STATUS_DISTRO_VERSION || env.DISTRO_VERSION }}' - status: "${{ job.status }}" - - Calc-functional-hardware-matrix: - name: Calculate Functional Hardware Testing Matrix - runs-on: [self-hosted, wolf] - needs: [Import-commit-pragmas] - if: needs.Import-commit-pragmas.outputs.run-gha == 'true' && - ((!cancelled()) || success() || failure()) - outputs: - matrix: ${{ steps.matrix.outputs.text }} - steps: - - name: Import commit pragmas - uses: ./.github/actions/import-commit-pragmas - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Calculate Functional Testing Matrix - id: matrix - run: | # do not use the non-| format for this script - . ci/gha_functions.sh - set -eu - # it might seem tempting to factor in the result of the build for this - # distro here and not include a failed build in the test matrix but - # the problem with that is that if/when the user asks GHA to rebuild - # all faiiled jobs and a previously failed RPM job is successful, the - # test matrix won't include testing it since it was calculated and was - # successful on the previous run without the failed build stage in it - l=() - trap 'echo "text=[$(IFS=","; echo "${l[*]}")]" >> $GITHUB_OUTPUT' EXIT - if ${CP_SKIP_FUNC_HW_TEST:-false}; then - exit 0 - fi - if ! cd src/tests/ftest; then - echo "src/tests/ftest doesn't exist." - echo "Could not determine if tests exist for this stage, assuming they do." - exit 0 - fi - if ! "${CP_SKIP_FUNC_HW_TEST_LARGE:-false}" && - ./launch.py --list "$(get_test_tags "hw,large,-provider")"; then - # it would definitely be nicer to get these into the environment - # as unquoted strings so that we didn't have to double quote here - l+=('"Large"') - fi - if ! ${CP_SKIP_FUNC_HW_TEST_MEDIUM:-false} && - ./launch.py --list "$(get_test_tags "hw,medium,-provider")"; then - l+=('"Medium"') - fi - if ! ${CP_SKIP_FUNC_HW_TEST_MEDIUM_VERBS_PROVIDER:-false} && - ./launch.py --list "$(get_test_tags "hw,medium,provider")"; then - l+=('"Medium Verbs Provider"') - fi - if ${{ github.event_name == 'push' }} && - ! 
-             ! ${CP_SKIP_FUNC_HW_TEST_MEDIUM_UCX_PROVIDER:-false} &&
-             ./launch.py --list "$(get_test_tags "hw,medium,provider")"; then
-              l+=('"Medium UCX Provider"')
-          fi
-
-  Functional_Hardware:
-    name: Functional Testing on Hardware
-    runs-on: [self-hosted, wolf]
-    permissions:
-      statuses: write
-      # https://github.com/EnricoMi/publish-unit-test-result-action#permissions
-      checks: write
-      pull-requests: write
-    timeout-minutes: 7200
-    needs: [Import-commit-message, Build-RPM, Calc-functional-hardware-matrix,
-            Import-commit-pragmas, Functional]
-    strategy:
-      matrix:
-        stage: ${{ fromJSON(needs.Calc-functional-hardware-matrix.outputs.matrix) }}
-      fail-fast: false
-    # https://github.com/actions/runner/issues/491#issuecomment-926924523
-    if: |
-      needs.Import-commit-pragmas.outputs.run-gha == 'true' &&
-      needs.Calc-functional-hardware-matrix.outputs.matrix != '[]' &&
-      (!cancelled()) &&
-      (needs.Build-RPM.result == 'success' ||
-       needs.Build-RPM.result == 'skipped') &&
-      (needs.Functional.result == 'success' ||
-       needs.Functional.result == 'skipped')
-    env:
-      CONFIG_POWER_ONLY: false
-      PRAGMA_SUFFIX: -vm
-      OPERATIONS_EMAIL: brian.murrell@intel.com
-      TEST_RPMS: true
-      COMMIT_MESSAGE: ${{ needs.Import-commit-message.outputs.message }}
-      JENKINS_URL: https://build.hpdd.intel.com/
-      REPOSITORY_URL: https://repo.dc.hpdd.intel.com/
-      REMOVE_EXISTING_RPMS: false
-      # TODO -- this should be on stable, backed-up storage
-      ARTIFACTS_URL: file:///scratch/job_repos/
-      REPO_FILE_URL: https://artifactory.dc.hpdd.intel.com/artifactory/repo-files/
-      # keep VS Code's GHA linting happy
-      NODESTRING:
-      CP_PR_REPOS:
-      CP_TEST_TAG:
-      CP_HW_MEDIUM_LABEL:
-      CP_HW_LARGE_LABEL:
-      CP_PRIORITY:
-      CP_EL8_VERSION:
-      CP_EL8_TARGET:
-      CLUSTER_REQUEST_reqid:
-      STAGE_NAME:
-      QUEUE_URL:
-      LABEL:
-      COMMIT_STATUS_DISTRO_VERSION:
-    steps:
-      - name: Import commit pragmas
-        uses: ./.github/actions/import-commit-pragmas
-      - name: Set variables
-        run: |
-          STAGE_TAGS="hw"
-          FTEST_ARG="--nvme=auto:-3DNAND"
-          INST_RPMS="daos-client daos-tests daos-server daos-serialize daos-tests-internal"
-          CHROOT_NAME="rocky+epel-8-x86_64"
-          DISTRO_NAME="EL"
-          DISTRO_NAME_UPPER="EL"
-          DISTRO_NAME_LOWER="el"
-          DISTRO_VERSION="${{ env.CP_EL8_TARGET &&
-                              env.CP_EL8_TARGET ||
-                              env.CP_EL8_VERSION &&
-                              env.CP_EL8_VERSION || env.EL8_VERSION }}"
-          DISTRO_VERSION_MAJOR="8"
-          if [[ "${{ matrix.stage }}" = Medium* ]]; then
-              LABEL=${{ env.CP_HW_MEDIUM_LABEL &&
-                        env.CP_HW_MEDIUM_LABEL || 'ci_nvme5' }}
-              STAGE_TAGS+=",medium"
-              SIZE="MEDIUM"
-          elif [[ "${{ matrix.stage }}" = Large* ]]; then
-              LABEL=${{ env.CP_HW_LARGE_LABEL &&
-                        env.CP_HW_LARGE_LABEL || 'ci_nvme9' }}
-              STAGE_TAGS+=",large"
-              SIZE="LARGE"
-          fi
-          if [[ "${{ matrix.stage }}" = *\ Provider ]]; then
-              STAGE_TAGS+=",provider"
-              if [[ "${{ matrix.stage }}" = *\ Verbs\ * ]]; then
-                  FTEST_ARG+=' --provider ofi+verbs'
-              elif [[ "${{ matrix.stage }}" = *\ UCX\ * ]]; then
-                  FTEST_ARG+=' --provider ucx+dc_x'
-                  INST_RPMS+=' mercury-ucx'
-              elif [[ "${{ matrix.stage }}" = *\ TCP\ * ]]; then
-                  FTEST_ARG+=' --provider ofi+tcp'
-              else
-                  echo "Unknown provider in ${{ matrix.stage }}"
-                  exit 1
-              fi
-          else
-              STAGE_TAGS+=",-provider"
-          fi
-          echo "DISTRO_NAME=$DISTRO_NAME" >> $GITHUB_ENV
-          echo "DISTRO_VERSION=$DISTRO_VERSION" >> $GITHUB_ENV
-          echo "DISTRO_WITH_VERSION=$DISTRO_NAME_LOWER$DISTRO_VERSION" >> $GITHUB_ENV
-          echo "STAGE_NAME=Functional Hardware ${{ matrix.stage }}" >> $GITHUB_ENV
-          echo "STAGE_TAGS=$STAGE_TAGS" >> $GITHUB_ENV
-          echo "FTEST_ARG=$FTEST_ARG" >> $GITHUB_ENV
"DISTRO=${DISTRO_NAME_UPPER}_$DISTRO_VERSION_MAJOR" >> $GITHUB_ENV - echo -n "PROVISION_DISTRO=${DISTRO_NAME_UPPER}_" >> $GITHUB_ENV - echo "${PROV_DISTRO_VERSION_MAJOR:-$DISTRO_VERSION_MAJOR}" >> $GITHUB_ENV - echo -n "DAOS_STACK_${DISTRO_NAME_UPPER}_" >> $GITHUB_ENV - echo "${PROV_DISTRO_VERSION_MAJOR:-$DISTRO_VERSION_MAJOR}_LOCAL_REPO=not_used" >> \ - $GITHUB_ENV - echo "LABEL=$LABEL" >> $GITHUB_ENV - echo "INST_RPMS=$INST_RPMS" >> $GITHUB_ENV - echo "SIZE=$SIZE" >> $GITHUB_ENV - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - fetch-depth: 500 - ref: ${{ github.event.pull_request.head.sha }} - - name: Request and Provision a Cluster - timeout-minutes: 7200 - uses: ./.github/actions/provision-cluster - with: - condition: env.CP_SKIP_FUNC_HW_TEST-${{ env.SIZE }} != 'true' && \ - env.CP_SKIP_FUNC_HW_TEST != 'true' - - name: Run Test - timeout-minutes: 7200 - if: env.CP_SKIP_FUNC_HW_TEST-${{ env.SIZE }} != 'true' && env.CP_SKIP_FUNC_HW_TEST != 'true' - id: run-test - run: | - . ci/gha_functions.sh - NODE_COUNT="$NODE_COUNT" \ - TEST_TAG="$(get_test_tags ${{ env.STAGE_TAGS}})" \ - FTEST_ARG="${{ env.FTEST_ARG }}" ci/functional/test_main.sh - - name: Cancel cluster request (if cancelled after requesting) - if: cancelled() - run: | - set -eux - . ci/gha_functions.sh - if ! JENKINS_URL="${{ env.JENKINS_URL }}" QUEUE_URL="${{ env.QUEUE_URL }}" \ - cancel_provision; then - # probably already provisioned and needs unprovisioning - if ! cleanup_provision_request "${{ env.CLUSTER_REQUEST_reqid }}"; then - exit 1 - fi - fi - - name: Job cleanup - if: (!cancelled() && (success() || failure())) - run: | - set -eux - . ci/gha_functions.sh - cleanup_provision_request "${{ env.CLUSTER_REQUEST_reqid }}" - NODELIST=${{ env.NODESTRING }} ci/functional/job_cleanup.sh - - name: Publish test results - if: (!cancelled()) && (success() || failure()) && - steps.run-test.outcome != 'skipped' - uses: EnricoMi/publish-unit-test-result-action@v2 - with: - check_name: ${{ env.STAGE_NAME }} Test Results (old) - github_token: ${{ secrets.GITHUB_TOKEN }} - junit_files: ${{ env.STAGE_NAME }}/**/results.xml - - name: Publish artifacts - if: (!cancelled()) && (success() || failure()) && - steps.run-test.outcome != 'skipped' - uses: actions/upload-artifact@v4 - with: - name: ${{ env.STAGE_NAME }} artifacts - path: ${{ env.STAGE_NAME }}/** - - name: Upload test results - if: (success() || failure()) && - steps.run-test.outcome != 'skipped' - uses: actions/upload-artifact@v4 - with: - name: ${{ env.STAGE_NAME }} test-results - path: ${{ env.STAGE_NAME }}/**/results.xml - - name: Update commit status - uses: ouzi-dev/commit-status-updater@v2 - with: - name: 'test/Functional Hardware ${{ matrix.stage }}' - status: "${{ job.status }}" diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml deleted file mode 100644 index 89152cb1afa..00000000000 --- a/.github/workflows/trivy.yml +++ /dev/null @@ -1,77 +0,0 @@ -# SPDX-License-Identifier: BSD-2-Clause-Patent -# Copyright (c) 2024 Intel Corporation. - -name: Trivy scan - -on: - workflow_dispatch: - schedule: - - cron: '0 0 * * *' - push: - branches: ["master", "release/**"] - pull_request: - branches: ["master", "release/**"] - -# Declare default permissions as nothing. 
diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml
deleted file mode 100644
index 89152cb1afa..00000000000
--- a/.github/workflows/trivy.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-# SPDX-License-Identifier: BSD-2-Clause-Patent
-# Copyright (c) 2024 Intel Corporation.
-
-name: Trivy scan
-
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: '0 0 * * *'
-  push:
-    branches: ["master", "release/**"]
-  pull_request:
-    branches: ["master", "release/**"]
-
-# Declare default permissions as nothing.
-permissions: {}
-
-jobs:
-  scan:
-    name: Scan with Trivy
-    runs-on: ubuntu-latest
-    permissions:
-      security-events: write
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-
-      - name: Run Trivy vulnerability scanner in filesystem mode (table format)
-        uses: aquasecurity/trivy-action@915b19bbe73b92a6cf82a1bc12b087c9a19a5fe2 # 0.28.0
-        with:
-          scan-type: 'fs'
-          scan-ref: '.'
-          trivy-config: 'utils/trivy/trivy.yaml'
-
-      - name: Prepare the report to be uploaded to the GitHub artifact store
-        run: |
-          mkdir report
-          cp trivy-report-daos.txt report
-          cp utils/trivy/.trivyignore report/trivyignore.txt
-
-      - name: Upload the report to the GitHub artifact store
-        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
-        with:
-          path: report/*
-          name: trivy-report-daos
-
-      - name: Adjust config file to use sarif format
-        run: |
-          sed -i 's/output: "trivy-report-daos.txt"/output: "trivy-results.sarif"/g' \
-            utils/trivy/trivy.yaml
-          sed -i 's/format: template/format: sarif/g' utils/trivy/trivy.yaml
-
-      - name: Run Trivy vulnerability scanner in filesystem mode (sarif format)
-        uses: aquasecurity/trivy-action@915b19bbe73b92a6cf82a1bc12b087c9a19a5fe2 # 0.28.0
-        with:
-          scan-type: 'fs'
-          scan-ref: '.'
-          trivy-config: 'utils/trivy/trivy.yaml'
-
-      - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@afb54ba388a7dca6ecae48f608c4ff05ff4cc77a
-        # 3.25.15 (v3)
-        with:
-          sarif_file: 'trivy-results.sarif'
-
-      - name: Adjust config file to show and validate scan results
-        run: |
-          sed -i 's/output: "trivy-results.sarif"//g' utils/trivy/trivy.yaml
-          sed -i 's/format: sarif/format: table/g' utils/trivy/trivy.yaml
-          sed -i 's/exit-code: 0/exit-code: 1/g' utils/trivy/trivy.yaml
-
-      - name: Run Trivy vulnerability scanner in filesystem mode (human readable format)
-        uses: aquasecurity/trivy-action@915b19bbe73b92a6cf82a1bc12b087c9a19a5fe2 # 0.28.0
-        with:
-          scan-type: 'fs'
-          scan-ref: '.'
-          trivy-config: 'utils/trivy/trivy.yaml'
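The deleted trivy.yml above runs the same filesystem scan three times, mutating utils/trivy/trivy.yaml in place with sed between runs: table output for the artifact, SARIF for the Security tab, then table again with exit-code 1 so findings fail the job. To replay that toggling outside CI, a wrapper along these lines could work; the script itself is hypothetical (its name and interface are invented here), but the sed edits are the workflow's own.

    #!/bin/bash
    # Hypothetical helper, not part of the deleted workflow; it just packages the
    # workflow's sed edits so the trivy.yaml mode switch can be replayed locally.
    set -euo pipefail

    conf=utils/trivy/trivy.yaml

    case "${1:-}" in
        sarif)
            # table template -> SARIF, as done before the Security-tab upload
            sed -i 's/output: "trivy-report-daos.txt"/output: "trivy-results.sarif"/g' "$conf"
            sed -i 's/format: template/format: sarif/g' "$conf"
            ;;
        enforce)
            # back to human-readable output, and make findings fail the run
            sed -i 's/output: "trivy-results.sarif"//g' "$conf"
            sed -i 's/format: sarif/format: table/g' "$conf"
            sed -i 's/exit-code: 0/exit-code: 1/g' "$conf"
            ;;
        *)
            echo "usage: $0 {sarif|enforce}" >&2
            exit 2
            ;;
    esac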
diff --git a/.github/workflows/unit-testing.yml b/.github/workflows/unit-testing.yml
deleted file mode 100644
index 3e386b48bf0..00000000000
--- a/.github/workflows/unit-testing.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-name: Unit testing
-
-on:
-  pull_request:
-
-permissions: {}
-
-jobs:
-  Run_in_docker:
-    if: github.repository == 'daos-stack/daos'
-    runs-on: [self-hosted, docker]
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-      - name: Build deps in Docker
-        run: docker build . --file utils/docker/Dockerfile.el.9
-             --build-arg DAOS_BUILD=no
-             --build-arg DEPS_JOBS=50
-      - name: Build daos in Docker
-        run: docker build . --file utils/docker/Dockerfile.el.9
-             --build-arg DEPS_JOBS=50
-             --build-arg DAOS_KEEP_SRC=yes
-             --build-arg DAOS_TARGET_TYPE=debug
-             --build-arg DAOS_JAVA_BUILD=yes
-             --tag gha-amd-${{github.run_id}}-${{github.run_attempt}}
-      - name: Run Unit Testing
-        run: docker run gha-amd-${{github.run_id}}-${{github.run_attempt}}
-             ./daos/utils/ci/run_unit_tests_in_gha.sh
diff --git a/.github/workflows/version-checks.yml b/.github/workflows/version-checks.yml
deleted file mode 100644
index 4463460b2c5..00000000000
--- a/.github/workflows/version-checks.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-name: Version checking
-
-on:
-  push:
-    branches:
-      - master
-  pull_request:
-    paths:
-      - 'utils/cq/requirements.txt'
-
-permissions: {}
-
-jobs:
-  upgrade-check:
-    name: Check for updates
-    runs-on: ubuntu-22.04
-    strategy:
-      fail-fast: false
-      matrix:
-        package: [pylint, yamllint, isort, codespell]
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.12'
-      - name: Install extra python packages
-        run: python3 -m pip install --requirement utils/cq/requirements.txt
-      - name: Check ${{ matrix.package }} version
-        run: python -m ${{ matrix.package }} --version | tee -a version-pre
-      - name: Upgrade
-        run: pip install --upgrade ${{ matrix.package }}
-      - name: Check ${{ matrix.package }} for version
-        run: python -m ${{ matrix.package }} --version | diff version-pre -
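For reference, the version-checks workflow that closes this patch boils down to a before/after comparison: record the pinned tool's --version output, pip-upgrade it, and let a non-empty diff (non-zero exit) signal that utils/cq/requirements.txt is behind upstream. A minimal local sketch of the same idea, reusing the workflow's own commands in an illustrative loop (it assumes the pinned versions are already installed from utils/cq/requirements.txt):

    # Illustrative only; mirrors the deleted workflow's per-package steps.
    for package in pylint yamllint isort codespell; do
        python -m "$package" --version | tee version-pre
        pip install --upgrade "$package" >/dev/null
        # diff exits non-zero when the upgraded version differs from the pinned one
        python -m "$package" --version | diff version-pre - \
            || echo "$package is behind the latest release"
    done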