From dcbaf5505d3d8c6789fb50e3369aad1a2a43316b Mon Sep 17 00:00:00 2001 From: Ashley Pittman Date: Fri, 5 Apr 2024 15:35:32 +0100 Subject: [PATCH] DAOS-14369 test: Rework pydaos and avocado install/setup process. (#13565) Update pydaos install process. Include pydaos sources in daos-devel rpm to allow installation on non-default python versions and virtual environments. Use updated python version for NLT to improve reporting and verify pydaos install process. Update dependencies for daos-client-tests rpm, add daos-devel to get pydaos sources and remove some no longer required python libs. Re-write entire python requirements.txt files whilst keeping install process the same. requirements.txt is now split into build, utest and ftest files, the original file pulls in all three as well as cq for linting. Update the build to only install build requirements, have utest install utest requirements and ftest install ftest requirements. Include avocado 82 in ftest requirements so this is used universally throughout CI. Update code as required to no longer handle or patch older avocado versions and add check that at least 82 is in use. 
Signed-off-by: Ashley Pittman ashley.m.pittman@intel.com --- .dockerignore | 4 +- .github/workflows/landing-builds.yml | 3 +- .github/workflows/linting.yml | 2 +- .github/workflows/version-checks.yml | 3 + SConstruct | 3 +- ci/gha_helper.py | 2 + .../post_provision_config_nodes_EL_7.sh | 77 ----------- .../post_provision_config_nodes_EL_8.sh | 19 +-- .../post_provision_config_nodes_LEAP_15.sh | 9 -- ...ost_provision_config_nodes_UBUNTU_20_04.sh | 8 +- ci/rpm/test_daos_node.sh | 24 +++- ci/unit/required_packages.sh | 15 +-- ci/unit/test_main_node.sh | 13 ++ ci/unit/test_nlt.sh | 2 +- ci/unit/test_nlt_node.sh | 13 ++ debian/changelog | 6 + debian/daos-client-tests.install | 3 - debian/libdaos-dev.dirs | 3 +- debian/libdaos-dev.install | 1 + requirements-build.txt | 5 + requirements-ftest.txt | 6 + requirements-utest.txt | 6 + requirements.txt | 21 ++- src/client/SConscript | 22 +-- src/client/pydaos/SConscript | 58 +++++--- src/client/pydaos/pydaos_shim.c | 71 +++++----- src/client/setup.py | 52 +++---- src/control/server/init/setup_spdk.sh | 2 +- ...ocado-job-result_proxy-reference-fix.patch | 25 ---- .../avocado-report-test-phases-common.patch | 97 ------------- .../avocado-report-test-phases-py2.patch | 13 -- .../avocado-report-test-phases-py3.patch | 13 -- .../ftest/avocado-teardown-timeout.patch | 78 ----------- src/tests/ftest/cart/util/cart_logusage.py | 2 +- src/tests/ftest/launch.py | 20 ++- src/tests/ftest/mpiio/llnl_mpi4py.py | 5 +- src/tests/ftest/scripts/main.sh | 127 +++--------------- src/tests/ftest/util/apricot/apricot/test.py | 20 ++- src/tests/ftest/util/avocado_utils.py | 115 ++++------------ src/tests/ftest/util/environment_utils.py | 47 ++++--- src/tests/ftest/util/launch_utils.py | 2 +- utils/ansible/ftest/vars/defaults.yml | 4 +- utils/ci/run_in_gha.sh | 6 +- utils/cq/requirements.txt | 10 +- utils/docker/Dockerfile.el.8 | 8 +- utils/docker/Dockerfile.el.9 | 10 +- utils/docker/Dockerfile.leap.15 | 10 +- utils/docker/Dockerfile.ubuntu | 10 +- 
utils/node_local_test.py | 9 +- utils/rpms/daos.spec | 16 +-- 50 files changed, 341 insertions(+), 759 deletions(-) delete mode 100755 ci/provisioning/post_provision_config_nodes_EL_7.sh create mode 100644 requirements-build.txt create mode 100644 requirements-ftest.txt create mode 100644 requirements-utest.txt delete mode 100644 src/tests/ftest/avocado-job-result_proxy-reference-fix.patch delete mode 100644 src/tests/ftest/avocado-report-test-phases-common.patch delete mode 100644 src/tests/ftest/avocado-report-test-phases-py2.patch delete mode 100644 src/tests/ftest/avocado-report-test-phases-py3.patch delete mode 100644 src/tests/ftest/avocado-teardown-timeout.patch diff --git a/.dockerignore b/.dockerignore index ebb8dc8d966..201d9c7e482 100644 --- a/.dockerignore +++ b/.dockerignore @@ -24,7 +24,9 @@ !ftest.sh !site_scons !.clang-format -!requirements.txt +!requirements-build.txt +!requirements-utest.txt +!requirements-ftest.txt # Now disallow extra files which may be present but are not required. # Include the rdb repo as well. diff --git a/.github/workflows/landing-builds.yml b/.github/workflows/landing-builds.yml index df17144a2b6..f52eb5743d9 100644 --- a/.github/workflows/landing-builds.yml +++ b/.github/workflows/landing-builds.yml @@ -14,7 +14,8 @@ on: - 'utils/scripts/helpers/*' - utils/ci - ci - - requirements.txt + - requirements-build.txt + - requirements-utest.txt jobs: diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index f7c397f5da8..30ab00fe4f5 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -26,7 +26,7 @@ jobs: python-version: '3' - uses: isort/isort-action@master with: - requirementsFiles: "requirements.txt utils/cq/requirements.txt" + requirementsFiles: "requirements.txt" - name: Run on SConstruct file. run: isort --check-only SConstruct - name: Run on build files. 
diff --git a/.github/workflows/version-checks.yml b/.github/workflows/version-checks.yml index 5115598ff8d..41edc866a55 100644 --- a/.github/workflows/version-checks.yml +++ b/.github/workflows/version-checks.yml @@ -19,6 +19,9 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.12' - name: Install extra python packages run: python3 -m pip install --requirement utils/cq/requirements.txt - name: Check ${{ matrix.package }} version diff --git a/SConstruct b/SConstruct index 9dbcfaf2255..c367bba0069 100644 --- a/SConstruct +++ b/SConstruct @@ -480,9 +480,8 @@ def scons(): prereqs.save_build_info() # also install to $PREFIX/lib to work with existing avocado test code if prereqs.test_requested(): - env.Install('$PREFIX/lib/daos', ['.build_vars.sh', '.build_vars.json']) env.Install('$PREFIX/lib/daos/TESTING/ftest/util', ['site_scons/env_modules.py']) - env.Install('$PREFIX/lib/daos/TESTING/ftest/', ['ftest.sh']) + env.Install('$PREFIX/lib/daos/TESTING/ftest/', ['ftest.sh', "requirements-ftest.txt"]) env.Install("$PREFIX/lib64/daos", "VERSION") diff --git a/ci/gha_helper.py b/ci/gha_helper.py index ac4e2965b74..f0c013b4720 100755 --- a/ci/gha_helper.py +++ b/ci/gha_helper.py @@ -16,6 +16,8 @@ '.github/workflows/landing-builds.yml', '.dockerignore', 'requirements.txt', + 'requirements-build.txt', + 'requirements-utest.txt', 'ci/gha_helper.py'] COMMIT_CMD = ['git', 'rev-parse', '--short', 'HEAD'] diff --git a/ci/provisioning/post_provision_config_nodes_EL_7.sh b/ci/provisioning/post_provision_config_nodes_EL_7.sh deleted file mode 100755 index 28e10cec6f1..00000000000 --- a/ci/provisioning/post_provision_config_nodes_EL_7.sh +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash - -if lspci | grep "ConnectX-6"; then - # No openmpi3 or MACSio-openmpi3 can be installed currently - # when the ConnnectX-6 driver is installed - INST_RPMS="${INST_RPMS// openmpi3/}" - INST_RPMS="${INST_RPMS// MACSio-openmpi3}" -fi - 
-bootstrap_dnf() { - timeout_cmd 5m yum -y install dnf 'dnf-command(config-manager)' -} - -group_repo_post() { - # Nothing to do for EL - : -} - -distro_custom() { - if [ ! -e /usr/bin/pip3 ] && - [ -e /usr/bin/pip3.6 ]; then - ln -s pip3.6 /usr/bin/pip3 - fi - if [ ! -e /usr/bin/python3 ] && - [ -e /usr/bin/python3.6 ]; then - ln -s python3.6 /usr/bin/python3 - fi - - # install the debuginfo repo in case we get segfaults - cat <<"EOF" > "$REPOS_DIR"/CentOS-Debuginfo.repo -[core-0-debuginfo] -name=CentOS-7 - Debuginfo -baseurl=http://debuginfo.centos.org/7/$basearch/ -gpgcheck=1 -gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-Debug-7 -enabled=0 -EOF - - # force install of avocado 69.x - dnf -y erase avocado{,-common} \ - python2-avocado{,-plugins-{output-html,varianter-yaml-to-mux}} \ - python36-PyYAML - pip3 install --upgrade pip - pip3 install "avocado-framework<70.0" - pip3 install "avocado-framework-plugin-result-html<70.0" - pip3 install "avocado-framework-plugin-varianter-yaml-to-mux<70.0" - pip3 install clustershell - - # Mellanox OFED hack - if ls -d /usr/mpi/gcc/openmpi-*; then - version="$(rpm -q --qf "%{version}" openmpi)" - mkdir -p /etc/modulefiles/mpi/ - cat < /etc/modulefiles/mpi/mlnx_openmpi-x86_64 -#%Module 1.0 -# -# OpenMPI module for use with 'environment-modules' package: -# -conflict mpi -prepend-path PATH /usr/mpi/gcc/openmpi-${version}/bin -prepend-path LD_LIBRARY_PATH /usr/mpi/gcc/openmpi-${version}/lib64 -prepend-path PKG_CONFIG_PATH /usr/mpi/gcc/openmpi-${version}/lib64/pkgconfig -prepend-path PYTHONPATH /usr/lib64/python2.7/site-packages/openmpi -prepend-path MANPATH /usr/mpi/gcc/openmpi-${version}/share/man -setenv MPI_BIN /usr/mpi/gcc/openmpi-${version}/bin -setenv MPI_SYSCONFIG /usr/mpi/gcc/openmpi-${version}/etc -setenv MPI_FORTRAN_MOD_DIR /usr/mpi/gcc/openmpi-${version}/lib64 -setenv MPI_INCLUDE /usr/mpi/gcc/openmpi-${version}/include -setenv MPI_LIB /usr/mpi/gcc/openmpi-${version}/lib64 -setenv MPI_MAN 
/usr/mpi/gcc/openmpi-${version}/share/man -setenv MPI_PYTHON_SITEARCH /usr/lib64/python2.7/site-packages/openmpi -setenv MPI_PYTHON2_SITEARCH /usr/lib64/python2.7/site-packages/openmpi -setenv MPI_COMPILER openmpi-x86_64 -setenv MPI_SUFFIX _openmpi -setenv MPI_HOME /usr/mpi/gcc/openmpi-${version} -EOF - fi -} diff --git a/ci/provisioning/post_provision_config_nodes_EL_8.sh b/ci/provisioning/post_provision_config_nodes_EL_8.sh index 0c73e1d0c96..2160df55e34 100644 --- a/ci/provisioning/post_provision_config_nodes_EL_8.sh +++ b/ci/provisioning/post_provision_config_nodes_EL_8.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# (C) Copyright 2021-2023 Intel Corporation. +# (C) Copyright 2021-2024 Intel Corporation. # # SPDX-License-Identifier: BSD-2-Clause-Patent @@ -15,19 +15,12 @@ group_repo_post() { } distro_custom() { - # install avocado - local avocado_rpms=(python3-avocado{,-plugins-{output-html,varianter-yaml-to-mux}}) - if [ -z "$(dnf repoquery "${avocado_rpms[@]}")" ]; then - avocado_rpms=() - pip install "avocado-framework<83.0" - pip install "avocado-framework-plugin-result-html<83.0" - pip install "avocado-framework-plugin-varianter-yaml-to-mux<83.0" - fi - dnf -y install "${avocado_rpms[@]}" clustershell - - # for Launchable's pip install - dnf -y install python3-setuptools.noarch + # TODO: This code is not exiting on failure. + # Use a more recent python version for unit testing, this allows us to also test installing + # pydaos into virtual environments. 
+ dnf -y install python39 python39-devel + dnf -y install python3.11 python3.11-devel } install_mofed() { diff --git a/ci/provisioning/post_provision_config_nodes_LEAP_15.sh b/ci/provisioning/post_provision_config_nodes_LEAP_15.sh index 8154188fc23..2c7c66da133 100755 --- a/ci/provisioning/post_provision_config_nodes_LEAP_15.sh +++ b/ci/provisioning/post_provision_config_nodes_LEAP_15.sh @@ -16,13 +16,4 @@ distro_custom() { sed -e '/MODULEPATH=/s/$/:\/usr\/share\/modules/' \ /etc/profile.d/lmod.sh; \ fi - - # force install of avocado 69.x - dnf -y erase avocado{,-common} \ - python2-avocado{,-plugins-{output-html,varianter-yaml-to-mux}} - python3 -m pip install --upgrade pip - python3 -m pip install "avocado-framework<70.0" - python3 -m pip install "avocado-framework-plugin-result-html<70.0" - python3 -m pip install "avocado-framework-plugin-varianter-yaml-to-mux<70.0" - } diff --git a/ci/provisioning/post_provision_config_nodes_UBUNTU_20_04.sh b/ci/provisioning/post_provision_config_nodes_UBUNTU_20_04.sh index 5547756b074..484a678a0a8 100755 --- a/ci/provisioning/post_provision_config_nodes_UBUNTU_20_04.sh +++ b/ci/provisioning/post_provision_config_nodes_UBUNTU_20_04.sh @@ -48,9 +48,7 @@ post_provision_config_nodes() { fi fi - apt-get -y install avocado python3-avocado-plugins-output-html \ - python3-avocado-plugins-varianter-yaml-to-mux \ - lsb-core + apt-get -y install lsb-core # shellcheck disable=2086 if [ -n "$INST_RPMS" ] && @@ -63,10 +61,6 @@ post_provision_config_nodes() { return "$rc" fi - # temporary hack until Python 3 is supported by Functional testing - # possible TODO: support testing non-RPM testing - sed -ie '1s/2/3/' /usr/lib/daos/TESTING/ftest/launch.py - # change the default shell to bash -- we write a lot of bash chsh -s /bin/bash diff --git a/ci/rpm/test_daos_node.sh b/ci/rpm/test_daos_node.sh index 3e92bb7b703..9968d1ec49d 100755 --- a/ci/rpm/test_daos_node.sh +++ b/ci/rpm/test_daos_node.sh @@ -101,17 +101,29 @@ sudo chmod 0755 
/tmp/daos_sockets sudo chown "$me:$me" /tmp/daos_sockets FTEST=/usr/lib/daos/TESTING/ftest -sudo PYTHONPATH="$FTEST/util" \ - $FTEST/config_file_gen.py -n "$HOSTNAME" \ - -a /etc/daos/daos_agent.yml \ - -s /etc/daos/daos_server.yml + +python3 -m venv venv +# shellcheck disable=SC1091 +source venv/bin/activate +pip install --upgrade pip +pip install -r $FTEST/requirements-ftest.txt + +sudo PYTHONPATH="$FTEST/util" \ + "${VIRTUAL_ENV}"/bin/python $FTEST/config_file_gen.py -n "$HOSTNAME" \ + -a /etc/daos/daos_agent.yml -s /etc/daos/daos_server.yml sudo bash -c 'echo "system_ram_reserved: 4" >> /etc/daos/daos_server.yml' sudo PYTHONPATH="$FTEST/util" \ - $FTEST/config_file_gen.py -n "$HOSTNAME" \ - -d /etc/daos/daos_control.yml + "${VIRTUAL_ENV}"/bin/python $FTEST/config_file_gen.py \ + -n "$HOSTNAME" -d /etc/daos/daos_control.yml cat /etc/daos/daos_server.yml cat /etc/daos/daos_agent.yml cat /etc/daos/daos_control.yml + +# python3.6 does not like deactivate with -u set, later versions are OK with it however. +set +u +deactivate +set -u + if ! 
module load "$OPENMPI"; then echo "Unable to load OpenMPI module: $OPENMPI" module avail diff --git a/ci/unit/required_packages.sh b/ci/unit/required_packages.sh index 86d2ac90fe2..07588d89935 100755 --- a/ci/unit/required_packages.sh +++ b/ci/unit/required_packages.sh @@ -2,17 +2,13 @@ set -eux -distro="$1" # No longer used but provided by pipeline-lib +# distro="$1" # quick_build="${2:-false}" -if [[ "$distro" = *7 ]]; then - OPENMPI_VER="3" - PY_MINOR_VER="6" -elif [[ "$distro" = *8 ]]; then - OPENMPI_VER="" - PY_MINOR_VER="" -fi +OPENMPI_VER="" +PY_MINOR_VER="" + pkgs="argobots \ boost-python3$PY_MINOR_VER-devel \ capstone \ @@ -32,9 +28,6 @@ pkgs="argobots \ patchelf \ pmix \ protobuf-c \ - python3$PY_MINOR_VER-junit_xml \ - python3$PY_MINOR_VER-pyxattr \ - python3$PY_MINOR_VER-tabulate \ spdk-devel \ valgrind-devel" diff --git a/ci/unit/test_main_node.sh b/ci/unit/test_main_node.sh index d12ddc7e3ef..a14b1fc3880 100755 --- a/ci/unit/test_main_node.sh +++ b/ci/unit/test_main_node.sh @@ -73,5 +73,18 @@ fi rm -rf "$test_log_dir" +# Use default python as that's where storage_estimator is installed. +python3 -m venv venv +# shellcheck disable=SC1091 +source venv/bin/activate +# touch venv/pip.conf +# pip config set global.progress_bar off +# pip config set global.no_color true + +pip install --upgrade pip +pip install --requirement requirements-utest.txt + +pip install /opt/daos/lib/daos/python/ + utils/run_utest.py $RUN_TEST_VALGRIND --no-fail-on-error $VDB_ARG --log_dir="$test_log_dir" \ $SUDO_ARG diff --git a/ci/unit/test_nlt.sh b/ci/unit/test_nlt.sh index dafcb4ae2eb..6270cba62ac 100755 --- a/ci/unit/test_nlt.sh +++ b/ci/unit/test_nlt.sh @@ -11,7 +11,7 @@ NODE=${NODELIST%%,*} mydir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" # Copy over the install tree and some of the build tree. 
-rsync -rlpt -z -e "ssh $SSH_KEY_ARGS" .build_vars* opt-daos.tar utils jenkins@"$NODE":build/ +rsync -rlpt -z -e "ssh $SSH_KEY_ARGS" .build_vars* opt-daos.tar utils requirements-utest.txt jenkins@"$NODE":build/ # shellcheck disable=SC2029 ssh -tt "$SSH_KEY_ARGS" jenkins@"$NODE" "$(cat "$mydir/test_nlt_node.sh")" diff --git a/ci/unit/test_nlt_node.sh b/ci/unit/test_nlt_node.sh index 4146c0af4ae..9975e290beb 100755 --- a/ci/unit/test_nlt_node.sh +++ b/ci/unit/test_nlt_node.sh @@ -24,5 +24,18 @@ sudo bash -c ". ./utils/sl/setup_local.sh; ./utils/setup_daos_server_helper.sh" # ./utils/node_local_test.py --max-log-size ????MiB --dfuse-dir /localhome/jenkins/ \ # --server-valgrind all +# Use the latest version that CI has available. +python3.11 -m venv venv +# shellcheck disable=SC1091 +source venv/bin/activate +touch venv/pip.conf +pip config set global.progress_bar off +pip config set global.no_color true + +pip install --upgrade pip +pip install --requirement requirements-utest.txt + +pip install /opt/daos/lib/daos/python/ + ./utils/node_local_test.py --max-log-size 1700MiB --dfuse-dir /localhome/jenkins/ \ --log-usage-save nltir.xml --log-usage-export nltr.json all diff --git a/debian/changelog b/debian/changelog index 71b8feb50a8..177e5d2410e 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +daos (2.5.101-3) unstable; urgency=medium + [ Ashley M. Pittman ] + * Updated pydaos install process + + -- Ashley M. 
Pittman Thu, 04 Apr 2024 09:15:00 -0800 + daos (2.5.101-2) unstable; urgency=medium [ Jan Michalski ] * Add dtx_tests to the server-tests package diff --git a/debian/daos-client-tests.install b/debian/daos-client-tests.install index 45cf8f67154..b1c31a4ea5c 100644 --- a/debian/daos-client-tests.install +++ b/debian/daos-client-tests.install @@ -21,6 +21,3 @@ usr/bin/crt_launch usr/bin/daos_gen_io_conf usr/bin/daos_run_io_conf usr/lib64/libdpar.so -# For avocado tests -usr/lib/daos/.build_vars.json -usr/lib/daos/.build_vars.sh diff --git a/debian/libdaos-dev.dirs b/debian/libdaos-dev.dirs index 0a8bac4797c..2e45ba7fccd 100644 --- a/debian/libdaos-dev.dirs +++ b/debian/libdaos-dev.dirs @@ -1,4 +1,5 @@ usr/lib64 usr/include usr/include/cart -usr/include/gurt \ No newline at end of file +usr/include/gurt +usr/lib/daos/python diff --git a/debian/libdaos-dev.install b/debian/libdaos-dev.install index 1fec1b8974a..13400ef68ad 100644 --- a/debian/libdaos-dev.install +++ b/debian/libdaos-dev.install @@ -5,3 +5,4 @@ usr/lib64/libdaos.so usr/lib64/libgurt.so usr/lib64/libcart.so usr/lib64/*.a +usr/lib/daos/python/* diff --git a/requirements-build.txt b/requirements-build.txt new file mode 100644 index 00000000000..952e8a520db --- /dev/null +++ b/requirements-build.txt @@ -0,0 +1,5 @@ +scons +ninja +meson +distro +pyelftools diff --git a/requirements-ftest.txt b/requirements-ftest.txt new file mode 100644 index 00000000000..c0c0f2b8eb1 --- /dev/null +++ b/requirements-ftest.txt @@ -0,0 +1,6 @@ +avocado-framework==82 +avocado-framework-plugin-result-html==82 +avocado-framework-plugin-varianter-yaml-to-mux==82 +clustershell +paramiko +distro diff --git a/requirements-utest.txt b/requirements-utest.txt new file mode 100644 index 00000000000..26044cd9e41 --- /dev/null +++ b/requirements-utest.txt @@ -0,0 +1,6 @@ +tabulate +junit_xml +pyxattr +PyYAML +# Disabled for now because of DAOS-14688 +# pytest diff --git a/requirements.txt b/requirements.txt index 93ea995a9e0..12ba27468c7 
100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,13 +1,8 @@ -# Packages required to build/test DAOS. -defusedxml -distro -jira -junit_xml -meson -ninja -pyelftools -pyxattr -pyyaml -scons -tabulate -wheel +# Packages required to build DAOS. +-r requirements-build.txt +# Packages required to run ftest. +-r requirements-ftest.txt +# Packages required to run unit testing. +-r requirements-utest.txt +# Packages used for linting. +-r utils/cq/requirements.txt diff --git a/src/client/SConscript b/src/client/SConscript index 6a4530994c6..414a06002f1 100644 --- a/src/client/SConscript +++ b/src/client/SConscript @@ -1,23 +1,27 @@ """Build DAOS client""" -Import('prereqs') +import os def scons(): """Execute build""" + Import("prereqs", "env") + if not (prereqs.client_requested() or prereqs.server_requested()): return - SConscript('array/SConscript') - SConscript('kv/SConscript') - SConscript('api/SConscript') + SConscript("array/SConscript") + SConscript("kv/SConscript") + SConscript("api/SConscript") if prereqs.client_requested(): - SConscript('dfs/SConscript') - SConscript('dfuse/SConscript') - SConscript('pydaos/SConscript') - SConscript('serialize/SConscript') - SConscript('ds3/SConscript') + SConscript("dfs/SConscript") + SConscript("dfuse/SConscript") + SConscript("serialize/SConscript") + SConscript("ds3/SConscript") + + SConscript("pydaos/SConscript") + env.Install(os.path.join("$PREFIX", "lib/daos/python"), "setup.py") if __name__ == "SCons.Script": diff --git a/src/client/pydaos/SConscript b/src/client/pydaos/SConscript index 6894fde3b0e..6e781b58368 100644 --- a/src/client/pydaos/SConscript +++ b/src/client/pydaos/SConscript @@ -1,39 +1,58 @@ """Build pydaos client""" +import os import sys +SOURCES = ["pydaos_core.py", "pydaos_shim.c", "__init__.py"] +SOURCES_RAW = ["daos_cref.py", "__init__.py", "conversion.py", "daos_api.py"] + + +def install_shim_sources(): + """Install the sources required to build pydaos""" + + Import("env") + + 
env.Install(os.path.join("$PREFIX", "lib/daos/python/pydaos"), SOURCES) + for source in SOURCES_RAW: + env.Install( + os.path.join("$PREFIX", "lib/daos/python/pydaos/raw"), f"raw/{source}" + ) + def build_shim_module(): """Build PyDAOS shim module for the specified python version""" - if GetOption('help'): + if GetOption("help"): return - version = f'{sys.version_info.major}.{sys.version_info.minor}' + version = f"{sys.version_info.major}.{sys.version_info.minor}" - Import('base_env') + Import("base_env") new_env = base_env.Clone() - new_env.ParseConfig(f'pkg-config --cflags --libs python-{version}') + new_env.ParseConfig(f"pkg-config --cflags --libs python-{version}") - new_env.Replace(LIBS=['daos', 'duns']) - new_env.AppendUnique(LIBPATH=[Dir('../dfs')]) - new_env.AppendUnique(LIBPATH=[Dir('../api')]) + new_env.Replace(LIBS=["daos", "duns"]) + new_env.AppendUnique(LIBPATH=[Dir("../dfs")]) + new_env.AppendUnique(LIBPATH=[Dir("../api")]) - new_env['CC'] = 'gcc' - new_env.AppendUnique(CCFLAGS=['-pthread', '-Wno-missing-field-initializers']) + new_env["CC"] = "gcc" + new_env.AppendUnique(CCFLAGS=["-pthread", "-Wno-missing-field-initializers"]) new_env.compiler_setup() - obj = new_env.SharedObject('pydaos_shim', 'pydaos_shim.c', - SHLINKFLAGS=[], - SHLIBPREFIX="") - base = new_env.d_library(target='pydaos_shim', source=[obj], - install_off="../../../..", - SHLINK='gcc -pthread -shared', - SHLINKFLAGS=[], - SHLIBPREFIX="", - SHLIBSUFFIX='.so') - install_path = f'$PREFIX/lib64/python{version}/site-packages/pydaos' + obj = new_env.SharedObject( + "pydaos_shim", "pydaos_shim.c", SHLINKFLAGS=[], SHLIBPREFIX="" + ) + base = new_env.d_library( + target="pydaos_shim", + source=[obj], + install_off="../../../..", + SHLINK="gcc -pthread -shared", + SHLINKFLAGS=[], + SHLIBPREFIX="", + SHLIBSUFFIX=".so", + ) + install_path = f"$PREFIX/lib64/python{version}/site-packages/pydaos" new_env.Install(install_path, base) # install new wrappers too new_env.Install(install_path, 
"__init__.py") @@ -47,4 +66,5 @@ def build_shim_module(): if __name__ == "SCons.Script": + install_shim_sources() build_shim_module() diff --git a/src/client/pydaos/pydaos_shim.c b/src/client/pydaos/pydaos_shim.c index 1100ec7a368..22e671fbcdc 100644 --- a/src/client/pydaos/pydaos_shim.c +++ b/src/client/pydaos/pydaos_shim.c @@ -1,15 +1,9 @@ /** - * (C) Copyright 2019-2023 Intel Corporation. + * (C) Copyright 2019-2024 Intel Corporation. * * SPDX-License-Identifier: BSD-2-Clause-Patent */ -/* Those are gone from python3, replaced with new functions */ -#define PyInt_FromLong PyLong_FromLong -#define PyString_FromString PyUnicode_FromString -#define PyString_FromStringAndSize PyUnicode_FromStringAndSize -#define PyString_AsString PyBytes_AsString - #include #include @@ -90,8 +84,8 @@ do { \ } \ } while (0) -static daos_handle_t glob_eq; -static int use_glob_eq; +static daos_handle_t glob_eq; +static bool use_glob_eq; /** * Implementations of baseline shim functions @@ -101,24 +95,22 @@ static PyObject * __shim_handle__daos_init(PyObject *self, PyObject *args) { int rc; - int ret; - char *override; rc = daos_init(); if ((rc == 0) && (use_glob_eq == 0)) { - d_agetenv_str(&override, "PYDAOS_GLOB_EQ"); - if ((override == NULL) || strcmp(override, "0")) { - use_glob_eq = 1; + d_getenv_bool("PYDAOS_GLOB_EQ", &use_glob_eq); + if (use_glob_eq) { + int ret; + ret = daos_eq_create(&glob_eq); if (ret) { - D_ERROR("Failed to create global eq, "DF_RC"\n", DP_RC(ret)); - use_glob_eq = 0; + DL_ERROR(ret, "Failed to create global eq"); + use_glob_eq = false; } } - d_freeenv_str(&override); } - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); } static PyObject * @@ -130,12 +122,12 @@ __shim_handle__daos_fini(PyObject *self, PyObject *args) rc = daos_eq_destroy(glob_eq, DAOS_EQ_DESTROY_FORCE); if (rc) D_ERROR("Failed to destroy global eq, "DF_RC"\n", DP_RC(rc)); - use_glob_eq = 0; + use_glob_eq = false; } rc = daos_fini(); - return PyInt_FromLong(rc); + return 
PyLong_FromLong(rc); } static PyObject * @@ -153,7 +145,7 @@ __shim_handle__err_to_str(PyObject *self, PyObject *args) return Py_None; } - return PyString_FromString(str); + return PyUnicode_FromString(str); } /** @@ -265,7 +257,7 @@ cont_open(int ret, char *pool, char *cont, int flags) /* Populate return list */ return_list = PyList_New(2); - PyList_SetItem(return_list, 0, PyInt_FromLong(rc)); + PyList_SetItem(return_list, 0, PyLong_FromLong(rc)); PyList_SetItem(return_list, 1, PyLong_FromVoidPtr(hdl)); return return_list; @@ -343,10 +335,10 @@ __shim_handle__cont_get(PyObject *self, PyObject *args) out: /* Populate return list */ return_list = PyList_New(4); - PyList_SetItem(return_list, 0, PyInt_FromLong(rc)); + PyList_SetItem(return_list, 0, PyLong_FromLong(rc)); PyList_SetItem(return_list, 1, PyLong_FromLong(oid.hi)); PyList_SetItem(return_list, 2, PyLong_FromLong(oid.lo)); - PyList_SetItem(return_list, 3, PyInt_FromLong(otype)); + PyList_SetItem(return_list, 3, PyLong_FromLong(otype)); return return_list; } @@ -408,7 +400,7 @@ __shim_handle__cont_newobj(PyObject *self, PyObject *args) out: /* Populate return list */ return_list = PyList_New(3); - PyList_SetItem(return_list, 0, PyInt_FromLong(rc)); + PyList_SetItem(return_list, 0, PyLong_FromLong(rc)); PyList_SetItem(return_list, 1, PyLong_FromLong(oid.hi)); PyList_SetItem(return_list, 2, PyLong_FromLong(oid.lo)); @@ -442,7 +434,7 @@ __shim_handle__cont_close(PyObject *self, PyObject *args) if (rc == 0) D_FREE(hdl); - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); } #define ITER_NR 96 @@ -684,7 +676,7 @@ cont_check(int ret, char *pool, char *cont, int flags) rc = rc2; } - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); } static PyObject * @@ -872,7 +864,7 @@ __shim_handle__kv_open(PyObject *self, PyObject *args) /* Populate return list */ return_list = PyList_New(2); - PyList_SetItem(return_list, 0, PyInt_FromLong(rc)); + PyList_SetItem(return_list, 0, PyLong_FromLong(rc)); 
PyList_SetItem(return_list, 1, PyLong_FromLong(oh.cookie)); return return_list; @@ -890,7 +882,7 @@ __shim_handle__kv_close(PyObject *self, PyObject *args) /** Close object */ rc = daos_kv_close(oh, NULL); - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); } /** @@ -960,7 +952,7 @@ __shim_handle__kv_get(PyObject *self, PyObject *args) if (!use_glob_eq) { rc = daos_eq_create(&eq); if (rc) - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); } else { eq = glob_eq; } @@ -1045,7 +1037,7 @@ __shim_handle__kv_get(PyObject *self, PyObject *args) if (PyUnicode_Check(key)) { op->key = (char *)PyUnicode_AsUTF8(key); } else { - op->key = PyString_AsString(key); + op->key = PyBytes_AsString(key); } if (!op->key) D_GOTO(err, rc = 0); @@ -1116,7 +1108,7 @@ __shim_handle__kv_get(PyObject *self, PyObject *args) } /* Populate return list */ - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); err: if (!use_glob_eq) @@ -1148,7 +1140,7 @@ __shim_handle__kv_put(PyObject *self, PyObject *args) if (!use_glob_eq) { rc = daos_eq_create(&eq); if (rc) - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); } else { eq = glob_eq; } @@ -1207,7 +1199,7 @@ __shim_handle__kv_put(PyObject *self, PyObject *args) if (PyUnicode_Check(key)) { key_str = (char *)PyUnicode_AsUTF8(key); } else { - key_str = PyString_AsString(key); + key_str = PyBytes_AsString(key); } if (!key_str) D_GOTO(err, rc = 0); @@ -1239,7 +1231,7 @@ __shim_handle__kv_put(PyObject *self, PyObject *args) rc = ret; } - return PyInt_FromLong(rc); + return PyLong_FromLong(rc); err: if (!use_glob_eq) daos_eq_destroy(eq, 0); @@ -1359,8 +1351,7 @@ __shim_handle__kv_iter(PyObject *self, PyObject *args) for (ptr = enum_buf, i = 0; i < nr; i++) { Py_ssize_t len = kds[i].kd_key_len; - rc = PyList_Append(entries, - PyString_FromStringAndSize(ptr, len)); + rc = PyList_Append(entries, PyUnicode_FromStringAndSize(ptr, len)); if (rc < 0) { rc = -DER_IO; break; @@ -1394,9 +1385,9 @@ __shim_handle__kv_iter(PyObject *self, 
PyObject *args) /* Populate return list */ return_list = PyList_New(4); - PyList_SetItem(return_list, 0, PyInt_FromLong(rc)); - PyList_SetItem(return_list, 1, PyInt_FromLong(nr_req)); - PyList_SetItem(return_list, 2, PyInt_FromLong(size)); + PyList_SetItem(return_list, 0, PyLong_FromLong(rc)); + PyList_SetItem(return_list, 1, PyLong_FromLong(nr_req)); + PyList_SetItem(return_list, 2, PyLong_FromLong(size)); if (rc || daos_anchor_is_eof(anchor)) { if (anchor_cap != NULL) Py_DECREF(anchor_cap); diff --git a/src/client/setup.py b/src/client/setup.py index 668e7871d47..14155c75f45 100644 --- a/src/client/setup.py +++ b/src/client/setup.py @@ -3,52 +3,34 @@ To use type: +pip install . + +or for older systems: + python3 setup.py install -If run from within a compiled DAOS source tree this it will detect the -install path automatically, otherwise it'll use the defaults. +This can be run from either the installed daos packages or from a install directory, however python +requires write access to the directory to install so if installing from rpms then a copy may have to +be made before install. 
""" -import json import os from setuptools import Extension, find_packages, setup +args = {"sources": ["pydaos/pydaos_shim.c"], "libraries": ["daos", "duns"]} -def load_conf(): - """Load the build config file""" - file_self = os.path.dirname(os.path.abspath(__file__)) - while file_self != '/': - new_file = os.path.join(file_self, '.build_vars.json') - if os.path.exists(new_file): - with open(new_file, 'r', encoding='utf-8') as ofh: - return json.load(ofh) - - file_self = os.path.dirname(file_self) - return None - - -conf = load_conf() - -args = {'sources': ['pydaos/pydaos_shim.c'], - 'libraries': ['daos', 'duns']} - -if conf: - args['include_dirs'] = [os.path.join(conf['PREFIX'], 'include')] - if conf.get('CART_PREFIX', None): - args['include_dirs'].extend(os.path.join( - conf['CART_PREFIX'], 'include')) - args['library_dirs'] = [os.path.join(conf['PREFIX'], 'lib64')] - args['runtime_library_dirs'] = args['library_dirs'] - +prefix_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "..") -args['define_macros'] = [('__USE_PYTHON3__', 1)] +if os.path.exists(os.path.join(prefix_dir, "include", "daos.h")): + args["include_dirs"] = [os.path.join(prefix_dir, "include")] + args["library_dirs"] = [os.path.join(prefix_dir, "lib64")] + args["runtime_library_dirs"] = args["library_dirs"] -module1 = Extension('pydaos.pydaos_shim', **args) setup( - name='pydaos', - version='0.2', + name="pydaos", + version="0.3", packages=find_packages(), - description='DAOS interface', - ext_modules=[module1] + description="DAOS interface", + ext_modules=[Extension("pydaos.pydaos_shim", **args)], ) diff --git a/src/control/server/init/setup_spdk.sh b/src/control/server/init/setup_spdk.sh index 7bf7766880d..7a006efcfd9 100755 --- a/src/control/server/init/setup_spdk.sh +++ b/src/control/server/init/setup_spdk.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/bash ## Wrap spdk setup script. 
This script will be called by daos_server_helper process which will be ## running with elevated privileges. Activities include changing directory permissions (which diff --git a/src/tests/ftest/avocado-job-result_proxy-reference-fix.patch b/src/tests/ftest/avocado-job-result_proxy-reference-fix.patch deleted file mode 100644 index 787c5aa9fa8..00000000000 --- a/src/tests/ftest/avocado-job-result_proxy-reference-fix.patch +++ /dev/null @@ -1,25 +0,0 @@ -commit 6d023677f5561b944da0f6358d982e88cefd3945 -Author: Brian J. Murrell -Date: Mon Dec 7 09:46:17 2020 -0500 - - Fix orphan Job().result_proxy reference - - Job().result_proxy was removed in ed4dcd1 however references to it - remained. - - This removes one of the remaining references. - - Signed-off-by: Brian J. Murrell - -diff --git a/avocado/core/runner.py b/avocado/core/runner.py -index 787c0669..104a7dac 100644 ---- a/avocado/core/runner.py -+++ b/avocado/core/runner.py -@@ -184,7 +184,6 @@ class TestStatus(object): - self.interrupt = True - elif "paused" in msg: - self.status = msg -- self.job.result_proxy.notify_progress(False) - self.job._result_events_dispatcher.map_method('test_progress', - False) - if msg['paused']: diff --git a/src/tests/ftest/avocado-report-test-phases-common.patch b/src/tests/ftest/avocado-report-test-phases-common.patch deleted file mode 100644 index e914bc8b6e6..00000000000 --- a/src/tests/ftest/avocado-report-test-phases-common.patch +++ /dev/null @@ -1,97 +0,0 @@ -commit 8467f4b41066cb8d2a9993325a1a0f2511c99bd6 -Author: Brian J. Murrell -Date: Mon Dec 7 11:03:59 2020 -0500 - - Test: report test phases - - The avocado test runner mostly "flies blind" when it comes to the - individual test phases, that is, it doesn't know if a test is - currently being initialized, running its setup, running the test - method itself or its teardown. - - With this, the runner gets access to that information, and can act - differently based on this knowledge. 
One use case is to allow for - different timeouts on different test phases. - - Signed-off-by: Cleber Rosa - Co-authored-by: Brian J. Murrell - -diff --git a/avocado/core/test.py b/avocado/core/test.py -index 4ff4ee03..73241b4e 100644 ---- a/avocado/core/test.py -+++ b/avocado/core/test.py -@@ -168,6 +168,8 @@ class Test(unittest.TestCase): - :param job: The job that this test is part of. - :raises: :class:`avocado.core.test.NameNotTestNameError` - """ -+ self.__phase = 'INIT' -+ - def record_and_warn(*args, **kwargs): - """ Record call to this function and log warning """ - if not self.__log_warn_used: -@@ -419,6 +421,15 @@ class Test(unittest.TestCase): - def traceback(self): - return self.__traceback - -+ @property -+ def phase(self): -+ """ -+ The current phase of the test execution -+ -+ Possible (string) values are: INIT, SETUP, TEST, TEARDOWN and FINISHED -+ """ -+ return self.__phase -+ - def __str__(self): - return str(self.name) - -@@ -565,6 +576,7 @@ class Test(unittest.TestCase): - skip_test = getattr(testMethod, '__skip_test_decorator__', False) - try: - if skip_test is False: -+ self.__phase = 'SETUP' - self.setUp() - except (exceptions.TestSetupSkip, exceptions.TestSkipError) as details: - stacktrace.log_exc_info(sys.exc_info(), logger=LOG_JOB) -@@ -578,6 +590,7 @@ class Test(unittest.TestCase): - raise exceptions.TestSetupFail(details) - else: - try: -+ self.__phase = 'TEST' - testMethod() - except exceptions.TestSetupSkip as details: - stacktrace.log_exc_info(sys.exc_info(), logger=LOG_JOB) -@@ -605,6 +618,7 @@ class Test(unittest.TestCase): - finally: - try: - if skip_test is False: -+ self.__phase = 'TEARDOWN' - self.tearDown() - except exceptions.TestSetupSkip as details: - stacktrace.log_exc_info(sys.exc_info(), logger=LOG_JOB) -@@ -728,6 +742,7 @@ class Test(unittest.TestCase): - for e_line in tb_info: - self.log.error(e_line) - finally: -+ self.__phase = 'FINISHED' - self._tag_end() - self._report() - self.log.info("") -diff --git 
a/selftests/functional/test_basic.py b/selftests/functional/test_basic.py -index 468b7d57..ed494733 100644 ---- a/selftests/functional/test_basic.py -+++ b/selftests/functional/test_basic.py -@@ -184,6 +184,13 @@ class RunnerOperationTest(unittest.TestCase): - self.assertIn(' data ' + mapping['data_dir'], result.stdout) - self.assertIn(' logs ' + mapping['logs_dir'], result.stdout) - -+ def test_runner_phases(self): -+ cmd_line = ('%s run --sysinfo=off --job-results-dir %s ' -+ 'phases.py' % (AVOCADO, self.tmpdir)) -+ result = process.run(cmd_line) -+ expected_rc = exit_codes.AVOCADO_ALL_OK -+ self.assertEqual(result.exit_status, expected_rc, -+ "Avocado did not return rc %d:\n%s" % (expected_rc, result)) - def test_runner_all_ok(self): - os.chdir(basedir) - cmd_line = ('%s run --sysinfo=off --job-results-dir %s ' diff --git a/src/tests/ftest/avocado-report-test-phases-py2.patch b/src/tests/ftest/avocado-report-test-phases-py2.patch deleted file mode 100644 index bab68fc6ffc..00000000000 --- a/src/tests/ftest/avocado-report-test-phases-py2.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/avocado/core/test.py b/avocado/core/test.py -index 4ff4ee03..73241b4e 100644 ---- a/avocado/core/test.py -+++ b/avocado/core/test.py -@@ -461,7 +472,7 @@ class Test(unittest.TestCase): - 'status', 'time_elapsed', - 'traceback', 'workdir', 'whiteboard', 'time_start', - 'time_end', 'running', 'paused', 'paused_msg', -- 'fail_class', 'params', "timeout"] -+ 'fail_class', 'params', "timeout", 'phase'] - state = {key: getattr(self, key, None) for (key) in preserve_attr} - state['class_name'] = self.__class__.__name__ - state['job_logdir'] = self.job.logdir diff --git a/src/tests/ftest/avocado-report-test-phases-py3.patch b/src/tests/ftest/avocado-report-test-phases-py3.patch deleted file mode 100644 index 1c8c3c08733..00000000000 --- a/src/tests/ftest/avocado-report-test-phases-py3.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/avocado/core/test.py b/avocado/core/test.py -index 
4ff4ee03..73241b4e 100644 ---- a/avocado/core/test.py -+++ b/avocado/core/test.py -@@ -62,7 +62,7 @@ - 'status', 'running', 'paused', - 'time_start', 'time_elapsed', 'time_end', - 'fail_reason', 'fail_class', 'traceback', -- 'timeout', 'whiteboard') -+ 'timeout', 'whiteboard', 'phase') - - - class RawFileHandler(logging.FileHandler): diff --git a/src/tests/ftest/avocado-teardown-timeout.patch b/src/tests/ftest/avocado-teardown-timeout.patch deleted file mode 100644 index efccde3acfd..00000000000 --- a/src/tests/ftest/avocado-teardown-timeout.patch +++ /dev/null @@ -1,78 +0,0 @@ -commit dca97da427c6b1290348f117f54f1282d841e0e8 -Author: Brian J. Murrell -Date: Mon Dec 7 09:41:12 2020 -0500 - - Give tearDown() 60 seconds to complete - - Even if a test uses up the entire timeout, give tearDown() an extra 60 - seconds to get it's work done. - - Signed-off-by: Brian J. Murrell - -diff --git a/avocado/core/runner.py b/avocado/core/runner.py -index 104a7dac..ca349e98 100644 ---- a/avocado/core/runner.py -+++ b/avocado/core/runner.py -@@ -45,6 +45,8 @@ TIMEOUT_TEST_INTERRUPTED = 1 - TIMEOUT_PROCESS_DIED = 10 - #: when test reported status but the process did not finish - TIMEOUT_PROCESS_ALIVE = 60 -+#: extra timeout to give to a test in TEARDOWN phase -+TIMEOUT_TEARDOWN = 60 - - - def add_runner_failure(test_state, new_status, message): -@@ -220,7 +222,7 @@ class TestStatus(object): - step) - if self.status: # status exists, wait for process to finish - deadline = min(deadline, time.time() + TIMEOUT_PROCESS_ALIVE) -- while time.time() < deadline: -+ while time.time() < deadline + TIMEOUT_TEARDOWN: - result_dispatcher.map_method('test_progress', False) - if wait.wait_for(lambda: not proc.is_alive(), 1, 0, - step): -@@ -413,7 +415,12 @@ class TestRunner(object): - - while True: - try: -- if time.time() >= deadline: -+ now = time.time() -+ if test_status.status.get('phase') == 'TEARDOWN': -+ reached = now >= deadline + TIMEOUT_TEARDOWN -+ else: -+ reached = now >= deadline -+ 
if reached: - abort_reason = "Timeout reached" - try: - os.kill(proc.pid, signal.SIGTERM) - except OSError: - pass - break - wait.wait_for(lambda: not queue.empty() or not proc.is_alive(), - cycle_timeout, first, step) - if test_status.interrupt: -diff --git a/avocado/core/test.py b/avocado/core/test.py -index 4ff4ee03..d5abc3e0 100644 ---- a/avocado/core/test.py -+++ b/avocado/core/test.py -@@ -252,6 +252,20 @@ class Test(unittest.TestCase): - - unittest.TestCase.__init__(self, methodName=methodName) - -+ def setUp(self): -+ """ -+ Set up -+ """ -+ self.report_state() -+ super().setUp() -+ -+ def tearDown(self): -+ """ -+ Tear down after each test case -+ """ -+ self.report_state() -+ super().tearDown() -+ - @property - def name(self): - """ diff --git a/src/tests/ftest/cart/util/cart_logusage.py b/src/tests/ftest/cart/util/cart_logusage.py index 358718141de..0c491a7e84e 100644 --- a/src/tests/ftest/cart/util/cart_logusage.py +++ b/src/tests/ftest/cart/util/cart_logusage.py @@ -149,7 +149,7 @@ def _save(self, fd): if not dname.startswith('src'): if dname == '': dname = 'src/control/cmd/daos' - else: + elif "src" in dname: parts = dname.split('/') while parts[0] != 'src': parts.pop(0) diff --git a/src/tests/ftest/launch.py b/src/tests/ftest/launch.py index 7b92bc0cfb5..2f168891d37 100755 --- a/src/tests/ftest/launch.py +++ b/src/tests/ftest/launch.py @@ -34,6 +34,10 @@ MAX_CI_REPETITIONS = 10 +class LaunchError(Exception): + """Error when launching Avocado""" + + class Launch(): """Class to launch avocado tests.""" @@ -136,9 +140,10 @@ def _configure(self, overwrite_config=False): self.avocado.set_config(overwrite_config) # Configure the logfile - self.avocado.set_version(logger) - self.logdir = self.avocado.get_directory( - logger, os.path.join("launch", self.name.lower()), False) + self.avocado.set_version() + if self.avocado.major < 82: + raise LaunchError("Avocado version 82 or above required") + self.logdir = self.avocado.get_directory(os.path.join("launch", 
self.name.lower())) self.logfile = os.path.join(self.logdir, "job.log") # Rename the launch log directory if one exists @@ -153,7 +158,7 @@ def _configure(self, overwrite_config=False): logger.info("-" * 80) logger.info("DAOS functional test launcher") logger.info("") - logger.info("Running with %s", self.avocado) + logger.info("Running with %s on python %s", self.avocado, sys.version) logger.info("Launch job results directory: %s", self.logdir) if renamed_log_dir is not None: logger.info(" Renamed existing launch job results directory to %s", renamed_log_dir) @@ -161,8 +166,8 @@ def _configure(self, overwrite_config=False): logger.info("-" * 80) # Results tracking settings - self.job_results_dir = self.avocado.get_logs_dir(logger) - max_chars = self.avocado.get_setting(logger, "job.run.result.xunit", "max_test_log_chars") + self.job_results_dir = self.avocado.get_logs_dir() + max_chars = self.avocado.get_setting("job.run.result.xunit", "max_test_log_chars") self.job = Job( self.name, xml_enabled="on", html_enabled="on", log_dir=self.logdir, max_chars=max_chars) @@ -199,7 +204,6 @@ def _create_log_dir(self): return old_launch_log_dir def run(self, args): - # pylint: disable=too-many-return-statements """Perform the actions specified by the command line arguments. Args: @@ -210,6 +214,8 @@ def run(self, args): """ try: status = self._run(args) + except LaunchError as error: + return self.get_exit_status(1, error, error) except Exception as error: # pylint: disable=broad-except message = f"Unknown exception raised during launch.py execution: {error}" status = self.get_exit_status(1, message, "Unknown", sys.exc_info()) diff --git a/src/tests/ftest/mpiio/llnl_mpi4py.py b/src/tests/ftest/mpiio/llnl_mpi4py.py index 1800d70e71f..0ec142c6d7d 100644 --- a/src/tests/ftest/mpiio/llnl_mpi4py.py +++ b/src/tests/ftest/mpiio/llnl_mpi4py.py @@ -1,5 +1,5 @@ """ - (C) Copyright 2019-2023 Intel Corporation. + (C) Copyright 2019-2024 Intel Corporation. 
SPDX-License-Identifier: BSD-2-Clause-Patent """ @@ -26,7 +26,8 @@ def get_test_repo(self, name): str: python site-packages path to the test repository """ test_repo = self.params.get(name, '/run/test_repo/') - for packages in site.getsitepackages(): + # DAOS-15602: Always check the python3-6 install for test sources. + for packages in site.getsitepackages() + ["/usr/lib64/python3.6/site-packages"]: test_path = os.path.join(packages, test_repo) if os.path.exists(test_path): return test_path diff --git a/src/tests/ftest/scripts/main.sh b/src/tests/ftest/scripts/main.sh index 68f6b5a4e1a..e6079637064 100755 --- a/src/tests/ftest/scripts/main.sh +++ b/src/tests/ftest/scripts/main.sh @@ -1,7 +1,7 @@ #!/bin/bash # shellcheck disable=SC1113 # /* -# * (C) Copyright 2016-2023 Intel Corporation. +# * (C) Copyright 2016-2024 Intel Corporation. # * # * SPDX-License-Identifier: BSD-2-Clause-Patent # */ @@ -17,6 +17,18 @@ fi # shellcheck disable=SC2153 mapfile -t TEST_TAG_ARR <<< "$TEST_TAG_ARG" +if [ -d venv ] +then + rm -rf venv +fi + +python3 -m venv venv +# shellcheck disable=SC1091 +source venv/bin/activate + +pip install --upgrade pip +pip install -r "$PREFIX"/lib/daos/TESTING/ftest/requirements-ftest.txt + if $TEST_RPMS; then rm -rf "$PWD"/install/tmp mkdir -p "$PWD"/install/tmp @@ -32,6 +44,13 @@ else cd "$DAOS_BASE" fi +# Copy the pydaos source locally and install it, in an ideal world this would install +# from the read-only tree directly but for now that isn't working. 
+# https://github.com/pypa/setuptools/issues/3237 +cp -a "$PREFIX"/lib/daos/python pydaos +pip install ./pydaos +rm -rf pydaos + # Disable D_PROVIDER to allow launch.py to set it unset D_PROVIDER @@ -44,110 +63,6 @@ unset D_INTERFACE # shellcheck disable=SC2153 export D_LOG_FILE="$TEST_TAG_DIR/daos.log" -# apply patches to Avocado -pydir="" -for loc in /usr/lib/python2*/site-packages/ \ - /usr/lib/python3*/site-packages/ \ - /usr/local/lib/python3*/site-packages/; do - if [ -f "$loc"/avocado/core/runner.py ]; then - pydir=$loc - break - fi -done -if [ -z "${pydir}" ]; then - echo "Could not determine avocado installation location" - exit 1 -fi - -PATCH_DIR="$PREFIX"/lib/daos/TESTING/ftest -# https://github.com/avocado-framework/avocado/pull/4345 fixed somewhere -# before 69.2 -if grep "self.job.result_proxy.notify_progress(False)" \ - "$pydir"/avocado/core/runner.py; then - echo "Applying patch avocado-job-result_proxy-reference-fix.patch" - if ! cat < "$PATCH_DIR"/avocado-job-result_proxy-reference-fix.patch | \ - sudo patch -p1 -d "$pydir"; then - echo "Failed to apply avocado PR-4345 patch" - exit 1 - fi -fi -# https://github.com/avocado-framework/avocado/pull/2908 fixed in -# https://github.com/avocado-framework/avocado/pull/3076/ -if ! grep "runner.timeout.process_died" "$pydir"/avocado/core/runner.py; then - # this version of runner.py is older than 82.0 - if ! grep TIMEOUT_TEARDOWN "$pydir"/avocado/core/runner.py; then - echo "Applying patch avocado-teardown-timeout.patch" - if ! cat < "$PATCH_DIR"/avocado-teardown-timeout.patch | \ - sudo patch -p1 -d "$pydir"; then - echo "Failed to apply avocado PR-3076 patch" - exit 1 - fi - fi -fi -# https://github.com/avocado-framework/avocado/pull/3154 - fixed somewhere -# before 69.2 -if ! grep "def phase(self)" \ - "$pydir"/avocado/core/test.py; then - echo "Applying patch avocado-report-test-phases-common.patch" - if ! 
filterdiff -p1 -x selftests/* < \ - "$PATCH_DIR"/avocado-report-test-phases-common.patch | \ - sed -e '/selftests\/.*/d' | \ - sudo patch -p1 -d "$pydir"; then - echo "Failed to apply avocado PR-3154 patch - common portion" - exit 1 - fi - if grep "^TEST_STATE_ATTRIBUTES = " "$pydir"/avocado/core/test.py; then - echo "Applying patch avocado-report-test-phases-py3.patch" - if ! cat < "$PATCH_DIR"/avocado-report-test-phases-py3.patch | \ - sudo patch -p1 -d "$pydir"; then - echo "Failed to apply avocado PR-3154 patch - py3 portion" - exit 1 - fi - else - echo "Applying patch avocado-report-test-phases-py2.patch" - if ! cat < "$PATCH_DIR"/avocado-report-test-phases-py2.patch | \ - sudo patch -p1 -d "$pydir"; then - echo "Failed to apply avocado PR-3154 patch - py2 portion" - exit 1 - fi - fi -fi -# apply fix for https://github.com/avocado-framework/avocado/issues/2908 - fixed -# somewhere before 69.2 -if grep "TIMEOUT_TEST_INTERRUPTED" \ - "$pydir"/avocado/core/runner.py; then - sudo ed <= 83: return ["avocado", "list"] - if self.major >= 82: - return ["avocado", "--paginator=off", "list"] - return ["avocado", "list", "--paginator=off"] + return ["avocado", "--paginator=off", "list"] def get_list_regex(self): """Get the regular expression used to get the test file from the avocado list command. 
@@ -257,20 +193,15 @@ def get_run_command(self, test, tag_filters, sparse, failfast): list: avocado run command """ command = ["avocado"] - if not sparse and self.major >= 82: + if not sparse: command.append("--show=test") command.append("run") - if self.major >= 82: - command.append("--ignore-missing-references") - else: - command.extend(["--ignore-missing-references", "on"]) + command.append("--ignore-missing-references") if self.major >= 83: command.append("--disable-tap-job-result") else: command.extend(["--html-job-result", "on"]) command.extend(["--tap-job-result", "off"]) - if not sparse and self.major < 82: - command.append("--show-job-log") if tag_filters: command.extend(tag_filters) if failfast: diff --git a/src/tests/ftest/util/environment_utils.py b/src/tests/ftest/util/environment_utils.py index da7be582050..da08bd8c52b 100644 --- a/src/tests/ftest/util/environment_utils.py +++ b/src/tests/ftest/util/environment_utils.py @@ -1,5 +1,5 @@ """ - (C) Copyright 2018-2023 Intel Corporation. + (C) Copyright 2018-2024 Intel Corporation. SPDX-License-Identifier: BSD-2-Clause-Patent """ @@ -18,7 +18,7 @@ class TestEnvironmentException(Exception): """Exception for launch.py execution.""" -def get_build_environment(logger, build_vars_file): +def _get_build_environment(logger, build_vars_file): """Obtain DAOS build environment variables from the .build_vars.json file. Args: @@ -29,19 +29,22 @@ def get_build_environment(logger, build_vars_file): TestEnvironmentException: if there is an error obtaining the DAOS build environment Returns: - dict: a dictionary of DAOS build environment variable names and values - + str: The prefix of the DAOS install. + None: If the file is not present. 
""" logger.debug("Obtaining DAOS build environment from %s", build_vars_file) try: with open(build_vars_file, encoding="utf-8") as vars_file: - return json.load(vars_file) + return json.load(vars_file)["PREFIX"] + + except FileNotFoundError: + return None except Exception as error: # pylint: disable=broad-except raise TestEnvironmentException("Error obtaining build environment:", str(error)) from error -def update_path(logger, build_vars_file): +def _update_path(logger, build_vars_file): """Update the PATH environment variable for functional testing. Args: @@ -51,17 +54,29 @@ def update_path(logger, build_vars_file): Raises: TestEnvironmentException: if there is an error obtaining the DAOS build environment """ - base_dir = get_build_environment(logger, build_vars_file)["PREFIX"] - bin_dir = os.path.join(base_dir, "bin") - sbin_dir = os.path.join(base_dir, "sbin") + base_dir = _get_build_environment(logger, build_vars_file) + + path = os.environ.get("PATH") + + parts = path.split(":") + + # If a custom prefix is used for the daos installation then prepend that to the path so that + # any binaries provided are picked up from there, else do not modify the path. + if base_dir: + bin_dir = os.path.join(base_dir, "bin") + sbin_dir = os.path.join(base_dir, "sbin") + + parts.insert(0, bin_dir) + parts.insert(0, sbin_dir) # /usr/sbin is not setup on non-root user for CI nodes. # SCM formatting tool mkfs.ext4 is located under /usr/sbin directory. 
usr_sbin = os.path.join(os.sep, "usr", "sbin") - path = os.environ.get("PATH") - # Update PATH - os.environ["PATH"] = ":".join([bin_dir, sbin_dir, usr_sbin, path]) + if usr_sbin not in parts: + parts.append(usr_sbin) + + os.environ["PATH"] = ":".join(parts) def set_python_environment(logger): @@ -79,10 +94,8 @@ def set_python_environment(logger): ] # Include the cart directory paths when running from sources - for cart_dir in os.listdir(os.path.abspath("cart")): - cart_path = os.path.join(os.path.abspath("cart"), cart_dir) - if os.path.isdir(cart_path): - required_python_paths.append(cart_path) + cart_utils_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "cart", "util") + required_python_paths.append(cart_utils_dir) required_python_paths.extend(site.getsitepackages()) @@ -541,7 +554,7 @@ def set_test_environment(logger, test_env=None, servers=None, clients=None, prov # Update the PATH environment variable build_vars_file = os.path.join( os.path.dirname(os.path.realpath(__file__)), "..", "..", "..", ".build_vars.json") - update_path(logger, build_vars_file) + _update_path(logger, build_vars_file) # Get the default fabric interface and provider test_env.set_defaults(logger, servers, clients, provider, insecure_mode) diff --git a/src/tests/ftest/util/launch_utils.py b/src/tests/ftest/util/launch_utils.py index 21e005a48c6..e8dc3326c14 100644 --- a/src/tests/ftest/util/launch_utils.py +++ b/src/tests/ftest/util/launch_utils.py @@ -693,7 +693,7 @@ def _collect_crash_files(self, logger): Args: logger (Logger): logger for the messages produced by this method """ - avocado_logs_dir = self.avocado.get_logs_dir(logger) + avocado_logs_dir = self.avocado.get_logs_dir() crash_dir = os.path.join(avocado_logs_dir.replace("job-results", "data"), "crashes") if os.path.isdir(crash_dir): crash_files = [ diff --git a/utils/ansible/ftest/vars/defaults.yml b/utils/ansible/ftest/vars/defaults.yml index a5f24c204c6..ef679445e25 100644 --- 
a/utils/ansible/ftest/vars/defaults.yml +++ b/utils/ansible/ftest/vars/defaults.yml @@ -1,4 +1,4 @@ -# Install dependencies of a developmene node +# Install dependencies of a development node daos_avocado_version_default: 2.4.3 -daos_avocado_framework_version_default: 82.1 +daos_avocado_framework_version_default: 82 diff --git a/utils/ci/run_in_gha.sh b/utils/ci/run_in_gha.sh index 6ec76e66e68..fda112b4c87 100755 --- a/utils/ci/run_in_gha.sh +++ b/utils/ci/run_in_gha.sh @@ -69,10 +69,8 @@ echo ::group::Config file after ALT_PREFIX build cat daos.conf echo ::endgroup:: -echo ::group::Install pydaos -cd src/client -python3 setup.py install -cd - +echo ::group::Install pydaos via pip +pip install /opt/daos/lib/daos/python echo ::endgroup:: echo ::group::Setting up daos_server_helper diff --git a/utils/cq/requirements.txt b/utils/cq/requirements.txt index 8eed08932c4..d1c64f24918 100644 --- a/utils/cq/requirements.txt +++ b/utils/cq/requirements.txt @@ -1,10 +1,4 @@ -# Packages needed to check code. These are needed for linting rather than build. -# 94.0 is reporting a couple of errors so stick on 93. -avocado-framework<94 -avocado-framework-plugin-result-html<94 -avocado-framework-plugin-varianter-yaml-to-mux<94 -clustershell -paramiko +# Packages which are used by linting but not required for build or test. pyenchant ## flake8 6 removed --diff option which breaks flake precommit hook. ## https://github.com/pycqa/flake8/issues/1389 https://github.com/PyCQA/flake8/pull/1720 @@ -13,3 +7,5 @@ isort==5.13.2 pylint==3.1.0 yamllint==1.35.1 codespell==2.2.6 +# Used by ci/jira_query.py which pip installs it standalone. +jira diff --git a/utils/docker/Dockerfile.el.8 b/utils/docker/Dockerfile.el.8 index 8b3192f3e7f..9e511941a89 100644 --- a/utils/docker/Dockerfile.el.8 +++ b/utils/docker/Dockerfile.el.8 @@ -72,11 +72,9 @@ ENV PATH=/home/daos/venv/bin:$PATH ENV VIRTUAL_ENV=/home/daos/venv/ # Install latest versions of python tools. 
-# wheel is needed first to avoid a warning when installing pyyaml. -COPY requirements.txt . +COPY requirements-build.txt requirements-utest.txt ./ RUN python3 -m pip --no-cache-dir install --upgrade pip && \ - python3 -m pip --no-cache-dir install wheel && \ - python3 -m pip --no-cache-dir install --requirement requirements.txt + python3 -m pip --no-cache-dir install -r requirements-build.txt -r requirements-utest.txt WORKDIR /home/daos/pre RUN mkdir -p /home/daos/pre/site_scons/prereq_tools /home/daos/pre/site_scons/components @@ -124,7 +122,7 @@ USER daos_server:daos_server LABEL DAOS=true WORKDIR /home/daos/daos/ -COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct .clang-format ./ +COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct requirements-ftest.txt .clang-format ./ COPY --chown=daos_server:daos_server site_scons site_scons COPY --chown=daos_server:daos_server utils utils COPY --chown=daos_server:daos_server src src diff --git a/utils/docker/Dockerfile.el.9 b/utils/docker/Dockerfile.el.9 index 64ac487f9e7..afd7594a360 100644 --- a/utils/docker/Dockerfile.el.9 +++ b/utils/docker/Dockerfile.el.9 @@ -1,4 +1,4 @@ -# Copyright 2022-2023 Intel Corporation +# Copyright 2022-2024 Intel Corporation # All rights reserved. # # 'recipe' for Docker to build an image of EL 9 based @@ -58,11 +58,9 @@ ENV PATH=/home/daos/venv/bin:$PATH ENV VIRTUAL_ENV=/home/daos/venv/ # Install latest versions of python tools. -# wheel is needed first to avoid a warning when installing pyyaml. -COPY requirements.txt . 
+COPY requirements-build.txt requirements-utest.txt ./ RUN python3 -m pip --no-cache-dir install --upgrade pip && \ - python3 -m pip --no-cache-dir install wheel && \ - python3 -m pip --no-cache-dir install --requirement requirements.txt + python3 -m pip --no-cache-dir install -r requirements-build.txt -r requirements-utest.txt WORKDIR /home/daos/pre RUN mkdir -p /home/daos/pre/site_scons/prereq_tools /home/daos/pre/site_scons/components @@ -110,7 +108,7 @@ USER daos_server:daos_server LABEL DAOS=true WORKDIR /home/daos/daos/ -COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct .clang-format ./ +COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct requirements-ftest.txt .clang-format ./ COPY --chown=daos_server:daos_server site_scons site_scons COPY --chown=daos_server:daos_server utils utils COPY --chown=daos_server:daos_server src src diff --git a/utils/docker/Dockerfile.leap.15 b/utils/docker/Dockerfile.leap.15 index 638b18ac36f..b8326e9c840 100644 --- a/utils/docker/Dockerfile.leap.15 +++ b/utils/docker/Dockerfile.leap.15 @@ -1,4 +1,4 @@ -# Copyright 2018-2023 Intel Corporation +# Copyright 2018-2024 Intel Corporation # All rights reserved. # # 'recipe' for Docker to build an image of Leap based @@ -58,11 +58,9 @@ ENV PATH=/home/daos/venv/bin:$PATH ENV VIRTUAL_ENV=/home/daos/venv/ # Install latest versions of python tools. -# wheel is needed first to avoid a warning when installing pyyaml. -COPY requirements.txt . 
+COPY requirements-build.txt requirements-utest.txt ./ RUN python3 -m pip --no-cache-dir install --upgrade pip && \ - python3 -m pip --no-cache-dir install wheel && \ - python3 -m pip --no-cache-dir install --requirement requirements.txt + python3 -m pip --no-cache-dir install -r requirements-build.txt -r requirements-utest.txt WORKDIR /home/daos/pre RUN mkdir -p /home/daos/pre/site_scons/prereq_tools /home/daos/pre/site_scons/components @@ -130,7 +128,7 @@ USER daos_server:daos_server LABEL DAOS=true WORKDIR /home/daos/daos/ -COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct .clang-format ./ +COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct requirements-ftest.txt .clang-format ./ COPY --chown=daos_server:daos_server site_scons site_scons COPY --chown=daos_server:daos_server utils utils COPY --chown=daos_server:daos_server src src diff --git a/utils/docker/Dockerfile.ubuntu b/utils/docker/Dockerfile.ubuntu index 7113d44ca4c..c7bf969e009 100644 --- a/utils/docker/Dockerfile.ubuntu +++ b/utils/docker/Dockerfile.ubuntu @@ -1,4 +1,4 @@ -# Copyright 2018-2023 Intel Corporation +# Copyright 2018-2024 Intel Corporation # All rights reserved. # # 'recipe' for Docker to build an image of Ubuntu-based environment for building the DAOS project. @@ -54,11 +54,9 @@ ENV PATH=/home/daos/venv/bin:$PATH ENV VIRTUAL_ENV=/home/daos/venv/ # Install latest versions of python tools. -# wheel is needed first to avoid a warning when installing pyyaml. -COPY requirements.txt . 
+COPY requirements-build.txt requirements-utest.txt ./ RUN python3 -m pip --no-cache-dir install --upgrade pip && \ - python3 -m pip --no-cache-dir install wheel && \ - python3 -m pip --no-cache-dir install --requirement requirements.txt + python3 -m pip --no-cache-dir install -r requirements-build.txt -r requirements-utest.txt WORKDIR /home/daos/pre RUN mkdir -p /home/daos/pre/site_scons/prereq_tools /home/daos/pre/site_scons/components @@ -99,7 +97,7 @@ USER daos_server:daos_server LABEL DAOS=true WORKDIR /home/daos/daos/ -COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct .clang-format ./ +COPY --chown=daos_server:daos_server VERSION LICENSE ftest.sh SConstruct requirements-ftest.txt .clang-format ./ COPY --chown=daos_server:daos_server site_scons site_scons COPY --chown=daos_server:daos_server utils utils COPY --chown=daos_server:daos_server src src diff --git a/utils/node_local_test.py b/utils/node_local_test.py index f7de653a9c7..b88bc837870 100755 --- a/utils/node_local_test.py +++ b/utils/node_local_test.py @@ -1612,11 +1612,8 @@ def assert_file_size(ofd, size): assert_file_size_fd(ofd.fileno(), size) -def import_daos(server, conf): +def import_daos(server): """Return a handle to the pydaos module""" - pydir = f'python{sys.version_info.major}.{sys.version_info.minor}' - - sys.path.append(join(conf['PREFIX'], 'lib64', pydir, 'site-packages')) os.environ['DD_MASK'] = 'all' os.environ['DD_SUBSYS'] = 'all' @@ -5245,7 +5242,7 @@ def test_pydaos_kv(server, conf): delete=False) os.environ['D_LOG_FILE'] = pydaos_log_file.name - daos = import_daos(server, conf) + daos = import_daos(server) pool = server.get_test_pool_obj() @@ -5309,7 +5306,7 @@ def test_pydaos_kv_obj_class(server, conf): log_name = tmp_file.name os.environ['D_LOG_FILE'] = log_name - daos = import_daos(server, conf) + daos = import_daos(server) pool = server.get_test_pool_obj() diff --git a/utils/rpms/daos.spec b/utils/rpms/daos.spec index 16ac6ba9375..3e3e339fa77 100644 --- 
a/utils/rpms/daos.spec +++ b/utils/rpms/daos.spec @@ -15,7 +15,7 @@ Name: daos Version: 2.5.101 -Release: 2%{?relval}%{?dist} +Release: 3%{?relval}%{?dist} Summary: DAOS Storage Engine License: BSD-2-Clause-Patent @@ -207,9 +207,7 @@ packages Summary: The DAOS test suite Requires: %{name}-client%{?_isa} = %{version}-%{release} Requires: %{name}-admin%{?_isa} = %{version}-%{release} -Requires: python3-distro -Requires: python3-tabulate -Requires: python3-defusedxml +Requires: %{name}-devel%{?_isa} = %{version}-%{release} Requires: protobuf-c-devel Requires: fio Requires: git @@ -364,7 +362,7 @@ install -m 644 utils/systemd/%{agent_svc_name} %{buildroot}/%{_unitdir} mkdir -p %{buildroot}/%{conf_dir}/certs/clients mv %{buildroot}/%{conf_dir}/bash_completion.d %{buildroot}/%{_sysconfdir} # fixup env-script-interpreters -sed -i -e '1s/env //' %{buildroot}{%{daoshome}/TESTING/ftest/{cart/cart_logtest,config_file_gen,launch,slurm_setup,tags,verify_perms}.py,%{_bindir}/daos_storage_estimator.py,%{_datarootdir}/daos/control/setup_spdk.sh} +sed -i -e '1s/env //' %{buildroot}{%{daoshome}/TESTING/ftest/{cart/cart_logtest,config_file_gen,launch,slurm_setup,tags,verify_perms}.py,%{_bindir}/daos_storage_estimator.py} # shouldn't have source files in a non-devel RPM rm -f %{buildroot}%{daoshome}/TESTING/ftest/cart/{test_linkage.cpp,utest_{hlc,portnumber,protocol,swim}.c,wrap_cmocka.h} @@ -521,9 +519,6 @@ getent passwd daos_agent >/dev/null || useradd -s /sbin/nologin -r -g daos_agent %config(noreplace) %{conf_dir}/fault-inject-cart.yaml %{_bindir}/fault_status %{_bindir}/crt_launch -# For avocado tests -%{daoshome}/.build_vars.json -%{daoshome}/.build_vars.sh %{_bindir}/daos_perf %{_bindir}/daos_racer %{_bindir}/daos_test @@ -564,6 +559,7 @@ getent passwd daos_agent >/dev/null || useradd -s /sbin/nologin -r -g daos_agent %{_libdir}/libgurt.so %{_libdir}/libcart.so %{_libdir}/*.a +%{daoshome}/python %files firmware %doc README.md @@ -587,6 +583,10 @@ getent passwd daos_agent 
>/dev/null || useradd -s /sbin/nologin -r -g daos_agent # No files in a shim package %changelog +* Thu Apr 04 2024 Ashley M. Pittman 2.5.101-3 +- Update pydaos install process +- Add a dependency from daos-client-tests to daos-devel + * Mon Mar 18 2024 Jan Michalski 2.5.101-2 - Add dtx_tests to the server-tests package