From 18ca30a416f483ab370d12bb476a74f471c4d626 Mon Sep 17 00:00:00 2001
From: ninamu <32328018+ninamu@users.noreply.github.com>
Date: Tue, 24 Sep 2024 17:48:08 +0200
Subject: [PATCH] consolidate scripts in workflow dir (#144)

* consolidate scripts in workflow dir
* move ci/cd to workflow/pipeline dir
* pass everything using CLI arguments instead of env. vars
* simplify directory structure of workflow
* use new pipeline config entry point in the ci.yml script
* extended README
---
 README.md                                    | 26 +++++
 ci/ci.yml                                    |  2 +-
 ci/configure-pipeline                        | 57 -----------
 workflow/configure-pipeline                  | 95 +++++++++++++++++++
 {ci/config => workflow/pipeline}/ci.py       | 52 ++++++----
 .../pipeline}/configuration.py               |  0
 .../pipeline}/requirements.txt               |  0
 {ci/config => workflow/pipeline}/schema.py   |  0
 .../pipeline}/schema/config.json             |  0
 .../pipeline}/templates/pipeline.yml         |  4 +-
 {ci/config => workflow/pipeline}/util.py     |  0
 stack-build => workflow/stage-build          | 13 +--
 run-reframe => workflow/stage-test           |  6 +-
 {scripts => workflow/util}/cache-configure   |  0
 {scripts => workflow/util}/generate-mirror   |  2 +-
 {scripts => workflow/util}/setup-oras        |  0
 {scripts => workflow/util}/setup-stackinator |  2 +-
 17 files changed, 171 insertions(+), 88 deletions(-)
 delete mode 100755 ci/configure-pipeline
 create mode 100755 workflow/configure-pipeline
 rename {ci/config => workflow/pipeline}/ci.py (69%)
 rename {ci/config => workflow/pipeline}/configuration.py (100%)
 rename {ci/config => workflow/pipeline}/requirements.txt (100%)
 rename {ci/config => workflow/pipeline}/schema.py (100%)
 rename {ci/config => workflow/pipeline}/schema/config.json (100%)
 rename {ci/config => workflow/pipeline}/templates/pipeline.yml (86%)
 rename {ci/config => workflow/pipeline}/util.py (100%)
 rename stack-build => workflow/stage-build (94%)
 rename run-reframe => workflow/stage-test (98%)
 rename {scripts => workflow/util}/cache-configure (100%)
 rename {scripts => workflow/util}/generate-mirror (96%)
 rename {scripts => workflow/util}/setup-oras (100%)
 rename {scripts => workflow/util}/setup-stackinator (96%)

diff --git a/README.md b/README.md
index 19dfdae0..1dbb725e 100644
--- a/README.md
+++ b/README.md
@@ -3,3 +3,29 @@
 This repository manages the recipes for uenv on CSCS' Alps clusters, and the definition of the CI/CD pipeline that deploys them.
 
 See the [documentation](https://eth-cscs.github.io/alps-uenv/) for an overview of the uenv, and a packaging guide.
+
+### First steps
+
+The project is structured as follows:
+
+    .
+    ├── ci            # CI/CD pipeline configuration file
+    ├── docs          # Documentation files, see `mkdocs.yml`
+    ├── recipes       # uenv configuration files based on spack
+    ├── workflow      # pipeline scripts and utils
+    ├── README.md
+    ├── config.yaml   # define available target systems for uenv build
+    └── mkdocs.yml
+
+Creating a new uenv can be accomplished by adding a new recipe to the `recipes` folder, as laid out under [recipe writing best practices](https://eth-cscs.github.io/alps-uenv/pkg-application-tutorial/), and updating the top-level `config.yaml` file with the uenv information and matching target system. The uenv itself is created via the scripts found in the `workflow` folder:
+
+    .
+    ├── workflow                 # scripts and utils
+    │   ├── pipeline             # CI/CD utils
+    │   ├── util                 # auxiliary scripts for uenv build and test
+    │   ├── configure-pipeline
+    │   ├── stage-build          # script executed during pipeline build stage
+    │   └── stage-test           # script executed during pipeline test stage
+    └── ...
+
+If testing locally, first invoke `configure-pipeline` to generate the `pipeline.yml` configuration file for the given target system. This file is then consumed by the `stage-build` and `stage-test` scripts, which can be triggered locally for testing purposes, but are in general executed directly by the CI/CD runner.
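+
+For example, the pipeline for a recipe can be configured locally with a command along the following lines (the system, uarch and uenv values below are purely illustrative; use a combination defined in `config.yaml`):
+
+    ./workflow/configure-pipeline -c ./config.yaml -r ./recipes -s santis -a gh200 -u netcdf-tools:2024 -o ./pipeline.yml
+
+The generated `pipeline.yml` lists the build and test jobs that invoke `stage-build` and `stage-test`.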
\ No newline at end of file
diff --git a/ci/ci.yml b/ci/ci.yml
index b6aae873..09d84385 100644
--- a/ci/ci.yml
+++ b/ci/ci.yml
@@ -7,7 +7,7 @@ pipeline-configure:
   tags: [rosa-k8s-lightweight]
   image: docker.io/python:latest
   script:
-    - ./ci/configure-pipeline
+    - ./workflow/configure-pipeline -c./config.yaml -r./recipes -s$system -u$uenv -a$uarch -o./pipeline.yml
   artifacts:
     paths:
       - pipeline.yml
diff --git a/ci/configure-pipeline b/ci/configure-pipeline
deleted file mode 100755
index 96a1f6da..00000000
--- a/ci/configure-pipeline
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env bash
-
-datetime () { date +"%Y-%m-%d %H:%M:%S"; }
-
-[[ -t 1 ]] && interm=yes
-
-red () { echo "\e[1;31m$1\e[m"; }
-yellow () { echo "\e[1;33m$1\e[m"; }
-log () { printf "$(yellow "[log $(datetime)]") $1\n"; }
-err () { printf "$(red "[error $(datetime)]") $1\n"; exit 1; }
-
-script_path=$(dirname $(realpath $0))
-root_path=$(realpath "$script_path/..")
-config_path="$script_path/config"
-
-#
-# print status
-#
-
-log "configuring cicd pipeline for alps uenv"
-log " system= $system"
-log " uarch = $uarch"
-log " uenv  = $uenv"
-log "root_path  =$root_path"
-log "script_path=$script_path"
-log "config_path=$config_path"
-
-#
-# create temporary working path for python
-#
-tmp_path=/tmp/uenv$CI_JOB_ID
-rm -rf $tmp_path
-mkdir -p $tmp_path
-
-log "temporary path $tmp_path"
-
-#
-# set up python environment
-#
-pyenv_path=$tmp_path/.pyenv
-
-python3 -m venv $pyenv_path
-source $pyenv_path/bin/activate
-log "created and loaded python venv in $pyenv_path"
-log "$(python --version)"
-
-pip install --upgrade --quiet pip
-pip install --quiet -r $config_path/requirements.txt
-log "installed python dependencies"
-
-$config_path/ci.py
-[[ $? -eq 0 ]] || err "unable to configure"
-
-log "configuration complete"
-
-log "the following pipeline was generated"
-cat pipeline.yml
diff --git a/workflow/configure-pipeline b/workflow/configure-pipeline
new file mode 100755
index 00000000..9965d255
--- /dev/null
+++ b/workflow/configure-pipeline
@@ -0,0 +1,95 @@
+#!/usr/bin/env bash
+
+datetime () { date +"%Y-%m-%d %H:%M:%S"; }
+
+usage () {
+    echo "usage: configure-pipeline -c config-path -r recipe-path -o output-path -s system -a uarch -u uenv"
+    echo ""
+    echo "where:"
+    echo "    config-path: the location of the config.yaml file"
+    echo "    recipe-path: the location of the recipes"
+    echo "    output-path: where the generated pipeline.yml is written"
+    echo "    system:      the target system"
+    echo "    uarch:       the target microarchitecture"
+    echo "    uenv:        the uenv to build"
+    echo ""
+    [[ "" == "$1" ]] && exit 0
+    err "$1"
+}
+
+[[ -t 1 ]] && interm=yes
+
+red () { echo "\e[1;31m$1\e[m"; }
+yellow () { echo "\e[1;33m$1\e[m"; }
+log () { printf "$(yellow "[log $(datetime)]") $1\n"; }
+err () { printf "$(red "[error $(datetime)]") $1\n"; exit 1; }
+
+config_path="-"
+recipe_path="-"
+output_path="-"
+uarch="-"
+system="-"
+uenv="-"
+
+while getopts c:r:o:s:u:a: flag
+do
+    case "${flag}" in
+        c) config_path=${OPTARG};;
+        r) recipe_path=$(realpath "${OPTARG}");;
+        o) output_path=$(realpath "${OPTARG}");;
+        a) uarch=${OPTARG};;
+        s) system=${OPTARG};;
+        u) uenv=${OPTARG};;
+    esac
+done
+
+[[ "-" == "${config_path}" ]] && usage "missing -c argument for config.yaml file"
+[[ "-" == "${recipe_path}" ]] && usage "missing -r argument for the recipes path"
+[[ "-" == "${output_path}" ]] && usage "missing -o argument for the output path"
+[[ "-" == "${system}" ]] && usage "missing -s argument for system"
+[[ "-" == "${uarch}" ]] && usage "missing -a argument for uarch"
+[[ "-" == "${uenv}" ]] && usage "missing -u argument for uenv"
+
+script_path=$(dirname $(realpath $0))
+pipeline_path=${script_path}/pipeline
+
+#
+# print status
+#
+
+log "configuring cicd pipeline for alps uenv"
+log " system  = $system"
+log " uarch   = $uarch"
+log " uenv    = $uenv"
+log "script_path  = $script_path"
+log "config_path  = $config_path"
+log "pipeline_path= $pipeline_path"
+log "recipe_path  = $recipe_path"
+log "output_path  = $output_path"
+
+#
+# create temporary working path for python
+#
+#TODO: check if CI_JOB_ID exists otherwise create random
+#CI_JOB_ID=squigglydot
+tmp_path=/tmp/uenv$CI_JOB_ID
+rm -rf $tmp_path
+mkdir -p $tmp_path
+
+log "temporary path $tmp_path"
+
+#
+# set up python environment
+#
+pyenv_path=$tmp_path/.pyenv
+
+python3 -m venv $pyenv_path
+source $pyenv_path/bin/activate
+log "created and loaded python venv in $pyenv_path"
+log "$(python --version)"
+
+pip install --upgrade --quiet pip
+pip install --quiet -r $pipeline_path/requirements.txt
+log "installed python dependencies"
+
+${pipeline_path}/ci.py --recipes="$recipe_path" --config="$config_path" --uarch="$uarch" --uenv="$uenv" --system="$system" --output="$output_path"
+[[ $? -eq 0 ]] || err "unable to configure"
+
+log "configuration complete"
+log "the generated configuration ${output_path} is:\n\n$(cat $output_path)"
diff --git a/ci/config/ci.py b/workflow/pipeline/ci.py
similarity index 69%
rename from ci/config/ci.py
rename to workflow/pipeline/ci.py
index b0e6843c..54a3d725 100755
--- a/ci/config/ci.py
+++ b/workflow/pipeline/ci.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 
+import argparse
 import jinja2
 import jsonschema
 import os
@@ -9,7 +10,6 @@
 prefix = pathlib.Path(__file__).parent.resolve()
 root_path = prefix.parent.resolve().parent.resolve()
-recipe_path = root_path / "recipes"
 
 sys.path = [prefix.as_posix()] + sys.path
@@ -24,7 +24,7 @@ def __init__(self, message):
         self.message = f"Environment: {message}"
         super().__init__(self.message)
 
-def readenv(config):
+def readenv(config, args):
     """
     returns a dictionary with the following fields:
     {
@@ -41,16 +41,9 @@ def readenv(config):
     - uenv
     """
 
-    system = os.getenv("system", default=None)
-    uarch = os.getenv("uarch", default=None)
-    target = os.getenv("uenv", default=None)
-
-    if system is None:
-        raise EnvError("'system' environment variable not set")
-    if uarch is None:
-        raise EnvError("'uarch' environment variable not set")
-    if target is None:
-        raise EnvError("'uenv' environment variable not set")
+    system = args.system
+    uarch = args.uarch
+    target = args.uenv
 
     # check that system+uarch are valid
     if uarch not in configuration.valid_uarch:
@@ -77,21 +70,44 @@ def readenv(config):
         "recipe": recipe,
     }
 
+def make_argparser():
+    parser = argparse.ArgumentParser(description=("Generate a build configuration for a spack stack from " "a recipe."))
+    # strictly necessary always
+    parser.add_argument("-s", "--system", required=True, type=str)
+    parser.add_argument("-a", "--uarch", required=True, type=str)
+    parser.add_argument("-o", "--output", required=True, type=str)
+    # if config is split into clusters and recipes part, then
+    # only the cluster part would be required always
+    parser.add_argument("-c", "--config", required=True, type=str)
+    # alternatively a user could provide a single recipe, instead of a uenv argument
+    # that is looked up in the recipes path via the cluster config
+    parser.add_argument("-r", "--recipes", required=True, type=str)
+    parser.add_argument("-u", "--uenv", required=True, type=str)
+
+    return parser
+
 if __name__ == "__main__":
     ### TODO ###
     # read CLI arguments
     # - output path for the pipeline.yml file (required)
     # + path of the configuration file (required)
     # - JOB_ID (if needed?)
if os.getenv("UENVCITEST", default=None) is not None: os.environ["system"] = "santis" os.environ["uarch"] = "gh200" os.environ["uenv"] = "netcdf-tools:2024" + try: + parser = make_argparser() + args = parser.parse_args() + except Exception as e: + print(f"ERROR parsing CLI arguments: str(e)") + exit(1) + # read and validate the configuration - print(recipe_path) try: - config = configuration.Config(prefix / "../../config.yaml", recipe_path) + config = configuration.Config(pathlib.Path(args.config),pathlib.Path(args.recipes)) + except jsonschema.exceptions.ValidationError as e: print() where = e.json_path.replace("$.","").replace(".", ":") @@ -105,7 +121,7 @@ def readenv(config): # read environment variables that describe image(s) to build in this run try: - env = readenv(config) + env = readenv(config, args) except EnvError as e: print() print(f"{util.colorize('[error] ', 'red')}{e.message}") @@ -135,7 +151,9 @@ def readenv(config): # generate top level makefiles pipeline_template = jinja_env.get_template("pipeline.yml") - with (root_path / "pipeline.yml").open("w") as f: + output_path = pathlib.Path(args.output) + with output_path.open("w") as f: f.write(pipeline_template.render(jobs=[job])) + print(f"\n{util.colorize('SUCCESS', 'green')} wrote {output_path} output file\n") diff --git a/ci/config/configuration.py b/workflow/pipeline/configuration.py similarity index 100% rename from ci/config/configuration.py rename to workflow/pipeline/configuration.py diff --git a/ci/config/requirements.txt b/workflow/pipeline/requirements.txt similarity index 100% rename from ci/config/requirements.txt rename to workflow/pipeline/requirements.txt diff --git a/ci/config/schema.py b/workflow/pipeline/schema.py similarity index 100% rename from ci/config/schema.py rename to workflow/pipeline/schema.py diff --git a/ci/config/schema/config.json b/workflow/pipeline/schema/config.json similarity index 100% rename from ci/config/schema/config.json rename to workflow/pipeline/schema/config.json diff --git a/ci/config/templates/pipeline.yml b/workflow/pipeline/templates/pipeline.yml similarity index 86% rename from ci/config/templates/pipeline.yml rename to workflow/pipeline/templates/pipeline.yml index 73ef48e5..4e9733c5 100644 --- a/ci/config/templates/pipeline.yml +++ b/workflow/pipeline/templates/pipeline.yml @@ -9,7 +9,7 @@ stages: SLURM_TIMELIMIT: 180 script: - echo "==== RECIPE $STACK_RECIPE" - - ./stack-build -n $STACK_NAME -s $STACK_SYSTEM -r $STACK_RECIPE -b /dev/shm/jenkssl $SPACK_DEVELOP -m $STACK_MOUNT -u $STACK_UARCH $NO_BWRAP + - ./workflow/stage-build -n $STACK_NAME -s $STACK_SYSTEM -r $STACK_RECIPE -b /dev/shm/jenkssl $SPACK_DEVELOP -m $STACK_MOUNT -u $STACK_UARCH $NO_BWRAP after_script: - rm -Rf /dev/shm/jenkssl @@ -20,7 +20,7 @@ stages: SLURM_TIMELIMIT: 30 GIT_STRATEGY: fetch script: - - ./run-reframe -n $STACK_NAME -s $STACK_SYSTEM -m $STACK_MOUNT -u $STACK_UARCH + - ./workflow/stage-test -n $STACK_NAME -s $STACK_SYSTEM -m $STACK_MOUNT -u $STACK_UARCH {% for job in jobs %} diff --git a/ci/config/util.py b/workflow/pipeline/util.py similarity index 100% rename from ci/config/util.py rename to workflow/pipeline/util.py diff --git a/stack-build b/workflow/stage-build similarity index 94% rename from stack-build rename to workflow/stage-build index d63e2eeb..1a7eec63 100755 --- a/stack-build +++ b/workflow/stage-build @@ -32,7 +32,7 @@ get_build_id () { } usage () { - echo "usage: stack-build -n name -r recipe-path -s system -b build-root -u uarch -m mount [-d] [-w]" + echo "usage: stage-build -n 
+    echo "usage: stage-build -n name -r recipe-path -s system -b build-root -u uarch -m mount [-d] [-w]"
     echo ""
     echo "where:"
     echo "  name: the name of the stack"
@@ -98,8 +98,8 @@ log "mount ${mount}"
 # Check if the recipe path exists
 [[ -d "${recipe_path}" ]] || err "recipe path '$recipe_path' does not exist"
 
-# add scripts to PATH
-export PATH="${ci_path}/scripts:$PATH"
+# add workflow directory to PATH
+export PATH="${ci_path}/workflow:$PATH"
 
 ##
 # Set up build cache flags if a build cache is available
@@ -126,7 +126,8 @@ fi
 ##
 
 log "configuring oras"
-source ${ci_path}/scripts/setup-oras
+#TODO: revert after testing
+source ${ci_path}/workflow/util/setup-oras
 
 
 ##
@@ -134,7 +135,7 @@ source ${ci_path}/scripts/setup-oras
 ##
 
 # Initialise the stackinator
-source ${ci_path}/scripts/setup-stackinator
+source ${ci_path}/workflow/util/setup-stackinator
 
 ##
 # Configure the build
@@ -164,7 +165,7 @@ cd "${build_path}"
 log "building image in ${build_path}"
 echo "env --ignore-environment PATH=/usr/bin:/bin:${PWD}/spack/bin make store.squashfs -j64"
 
-# Propagate username and password defined in scripts/setup-oras (to be used in pre-install script)
+# Propagate username and password defined in workflow/util/setup-oras (to be used in pre-install script)
 env --ignore-environment PATH=/usr/bin:/bin:${PWD}/spack/bin HOME=$HOME https_proxy=$https_proxy http_proxy=$http_proxy no_proxy="$no_proxy" CSCS_REGISTRY_USERNAME=$jfrog_u CSCS_REGISTRY_PASSWORD=$jfrog_p make store.squashfs -kj64
 [[ $? -eq 0 ]] || env --ignore-environment PATH=/usr/bin:/bin:${PWD}/spack/bin HOME=$HOME https_proxy=$https_proxy http_proxy=$http_proxy no_proxy="$no_proxy" CSCS_REGISTRY_USERNAME=$jfrog_u CSCS_REGISTRY_PASSWORD=$jfrog_p make store.squashfs -kj8
diff --git a/run-reframe b/workflow/stage-test
similarity index 98%
rename from run-reframe
rename to workflow/stage-test
index c0f72fd3..5e6546a5 100755
--- a/run-reframe
+++ b/workflow/stage-test
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-#usage: run-reframe -s system -n name
+#usage: stage-test -s system -n name
 
 datetime () { date +"%Y-%m-%d %H:%M:%S"; }
 
@@ -61,7 +61,7 @@ log "ci_path ${ci_path}"
 log "test_path ${test_path}"
 
 # configure oras
-source ${ci_path}/scripts/setup-oras
+source ${ci_path}/workflow/util/setup-oras
 
 # create the path in which tests will run...
 mkdir -p ${test_path}
diff --git a/scripts/cache-configure b/workflow/util/cache-configure
similarity index 100%
rename from scripts/cache-configure
rename to workflow/util/cache-configure
diff --git a/scripts/generate-mirror b/workflow/util/generate-mirror
similarity index 96%
rename from scripts/generate-mirror
rename to workflow/util/generate-mirror
index afe6bab7..f3400e87 100755
--- a/scripts/generate-mirror
+++ b/workflow/util/generate-mirror
@@ -1,4 +1,4 @@
-# to be sourced from stack-build
+# to be sourced from stage-build
 
 log "build cache: generating configuration"
 
diff --git a/scripts/setup-oras b/workflow/util/setup-oras
similarity index 100%
rename from scripts/setup-oras
rename to workflow/util/setup-oras
diff --git a/scripts/setup-stackinator b/workflow/util/setup-stackinator
similarity index 96%
rename from scripts/setup-stackinator
rename to workflow/util/setup-stackinator
index 2a1010b0..4f4da42e 100755
--- a/scripts/setup-stackinator
+++ b/workflow/util/setup-stackinator
@@ -1,4 +1,4 @@
-# to be sourced from stack-build
+# to be sourced from stage-build
 
 tool_base_path="$(pwd)"
 
 # check out a unique copy of stackinator for each ci task, so that