diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 57f52554..3bdc799a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,10 +1,17 @@ version: 2 updates: - package-ecosystem: "pip" # See documentation for possible values - directory: "/requirements" # Location of package manifests - insecure-external-code-execution: allow + directory: "/" # Location of package manifests schedule: - interval: "daily" + interval: "weekly" labels: - "maintenance" - "dependencies" + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + labels: + - "maintenance" + - "dependencies" \ No newline at end of file diff --git a/.github/labeler.yml b/.github/labeler.yml index f5d90427..30c2c1f3 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,8 +1,9 @@ documentation: -- doc/source/**/* +- changed-files: + - any-glob-to-any-file: ['doc/source/**/*'] maintenance: -- .github/**/* -- .flake8 -- pyproject.toml +- changed-files: + - any-glob-to-any-file: ['.github/**/*', '.flake8', 'pyproject.toml'] dependencies: -- requirements/* +- changed-files: + - any-glob-to-any-file: ['pyproject.toml'] \ No newline at end of file diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 689c9bb3..129d55ff 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -9,9 +9,9 @@ on: - main env: - MAIN_PYTHON_VERSION: '3.9' - DOC_PYTHON_VERSION: '3.9' - DOCUMENTATION_CNAME: 'rep.docs.pyansys.com' + MAIN_PYTHON_VERSION: '3.10' + DOCUMENTATION_CNAME: 'hps.docs.pyansys.com' + PACKAGE_NAME: 'ansys-hps-client' concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -34,16 +34,25 @@ jobs: - name: Test with tox run: tox -e style - quick-tests: + doc-style: + name: Documentation Style Check + runs-on: ubuntu-latest + steps: + - name: PyAnsys documentation style checks + uses: ansys/actions/doc-style@v5 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + tests: name: Quick tests and coverage runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest] - cfg: - - {python-version: "3.10", toxenv: "py310"} + os: [ubuntu-latest] + cfg: [ + { python-version: '3.10', toxenv: 'py310' }, + ] fail-fast: false - steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.cfg.python-version }} @@ -95,6 +104,14 @@ jobs: REP_TEST_USERNAME: repadmin REP_TEST_PASSWORD: repadmin + - name: "Upload coverage results" + uses: actions/upload-artifact@v4 + if: matrix.cfg.python-version == env.MAIN_PYTHON_VERSION + with: + name: coverage-html + path: .cov/html + retention-days: 7 + - name: Publish Test Report uses: mikepenz/action-junit-report@v3 if: always() @@ -113,102 +130,170 @@ jobs: docs: name: Documentation runs-on: ubuntu-latest - # needs: docs-style + needs: [doc-style] steps: - uses: actions/checkout@v3 + - name: Set up Python uses: actions/setup-python@v4 with: - python-version: ${{ env.DOC_PYTHON_VERSION }} + python-version: ${{ env.MAIN_PYTHON_VERSION }} + - name: Install dependencies run: | python -m pip install --upgrade pip setuptools tox + + # Necessary for creating PDF + sudo apt-get -y install latexmk + sudo apt install texlive-latex-extra + - name: Generate the documentation with tox run: tox -e doc + - name: Upload HTML Documentation - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: documentation-html - path: build/sphinx/html + path: doc/_build/html retention-days: 7 - upload_dev_docs: - name: "Upload dev documentation" - if: github.ref == 'refs/heads/main' + - name: Upload 
PDF Documentation + uses: actions/upload-artifact@v4 + with: + name: documentation-pdf + path: doc/_build/latex/*.pdf + retention-days: 7 + + smoke-tests: + name: Build and Smoke tests + runs-on: ${{ matrix.os }} + needs: [style] + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ['3.9', '3.10', '3.11', '3.12'] + should-release: + - ${{ github.event_name == 'push' && contains(github.ref, 'refs/tags') }} + exclude: + - should-release: false + os: macos-latest + steps: + - name: Build wheelhouse and perform smoke test + uses: ansys/actions/build-wheelhouse@v5 + with: + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ${{ matrix.os }} + python-version: ${{ matrix.python-version }} + + package: + name: Package library + needs: [tests, docs, smoke-tests] runs-on: ubuntu-latest - needs: [docs] steps: - - name: Deploy the latest documentation - uses: ansys/actions/doc-deploy-dev@v2 + - name: Build library source and wheel artifacts + uses: ansys/actions/build-library@v5 with: - cname: ${{ env.DOCUMENTATION_CNAME }} - token: ${{ secrets.GITHUB_TOKEN }} + library-name: ${{ env.PACKAGE_NAME }} + python-version: ${{ env.MAIN_PYTHON_VERSION }} - build: - name: Build and publish library + library-version: + name: Get library version runs-on: ubuntu-latest - outputs: - version: ${{ steps.build.outputs.version }} steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 + - name: "Install Git and clone project" + uses: actions/checkout@v4 + + - name: "Set up Python ${{ env.MAIN_PYTHON_VERSION }}" + uses: ansys/actions/_setup-python@main with: python-version: ${{ env.MAIN_PYTHON_VERSION }} - - name: Install dependencies and build the library - id: build + use-cache: true + + - name: "Install the library" + shell: bash run: | - python -m pip install --upgrade pip setuptools - python -m pip install -r requirements/requirements_build.txt - python -m build venv wheel - python -m twine check dist/* - version=$(ls dist | grep ansys_pyhps | grep -E -o "[0-9]+.[0-9]+.[a-z0-9]+") - echo "version=$version" >> $GITHUB_OUTPUT - #- name: Upload to private PyPi - # if: github.ref == 'refs/heads/main' - # run: | - # python -m twine upload --verbose --skip-existing dist/*.whl - # env: - # TWINE_USERNAME: __token__ - # TWINE_PASSWORD: ${{ secrets.PYANSYS_PYPI_PRIVATE_PAT }} - # TWINE_REPOSITORY_URL: https://pkgs.dev.azure.com/pyansys/_packaging/pyansys/pypi/upload - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: ansys-pyhps-package - path: | - dist/*.whl - retention-days: 5 + python -m pip install -e . 
- release: - name: "Release" - if: startsWith(github.ref, 'refs/heads/main') || github.ref_type == 'tag' - needs: [build, docs] + - name: "Verify library is properly installed and get its version number" + shell: bash + run: | + library_name=${{ env.PACKAGE_NAME }} + version=$(python -c "import importlib.metadata as importlib_metadata; print(importlib_metadata.version('$library_name'))") + + if [ -z "$version" ]; then + echo "Problem getting the library version" + exit 1; + else + echo "The library version is: $version"; + fi; + echo "library_version=$version" >> $GITHUB_ENV + + pre-release: + name: Pre-release project + if: startsWith(github.ref, 'refs/heads/main') + needs: [package, library-version] runs-on: ubuntu-latest steps: - uses: actions/download-artifact@v3 with: - name: ansys-pyhps-package + name: ansys-hps-client-artifacts path: /tmp/artifacts - name: List artifacts run: ls -ltR /tmp/artifacts - - name: Release for tag - if: github.ref_type == 'tag' - uses: softprops/action-gh-release@v1 - with: - generate_release_notes: true - files: | - /tmp/artifacts/*.whl - - name: Release to latest-dev uses: "marvinpinto/action-automatic-releases@latest" if: startsWith(github.ref, 'refs/heads/main') with: repo_token: ${{ secrets.GITHUB_TOKEN }} - automatic_release_tag: "v${{needs.build.outputs.version}}" + automatic_release_tag: "v${{ env.library_version }}" prerelease: true - title: "v${{needs.build.outputs.version}}" + title: "v${{ env.library_version }}" files: | /tmp/artifacts/*.whl + + upload_dev_docs: + name: "Deploy dev documentation" + if: github.ref == 'refs/heads/main' + runs-on: ubuntu-latest + needs: [package] + steps: + - name: Deploy the latest documentation + uses: ansys/actions/doc-deploy-dev@v4 + with: + doc-artifact-name: 'documentation-html' + cname: ${{ env.DOCUMENTATION_CNAME }} + token: ${{ secrets.GITHUB_TOKEN }} + + release: + name: Release project + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + needs: [package] + runs-on: ubuntu-latest + steps: + - name: Release to the private PyPI repository + uses: ansys/actions/release-pypi-private@v5 + with: + library-name: ${{ env.PACKAGE_NAME }} + twine-username: "__token__" + twine-token: ${{ secrets.PYANSYS_PYPI_PRIVATE_PAT }} + + - name: Release to GitHub + uses: ansys/actions/release-github@v5 + with: + library-name: ${{ env.PACKAGE_NAME }} + + doc-deploy-stable: + name: "Deploy stable documentation" + # Deploy release documentation when creating a new tag + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + runs-on: ubuntu-latest + needs: [release] + steps: + - uses: ansys/actions/doc-deploy-stable@v4 + with: + doc-artifact-name: 'documentation-html' + cname: ${{ env.DOCUMENTATION_CNAME }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml index ae405f78..c8f362ab 100644 --- a/.github/workflows/label.yml +++ b/.github/workflows/label.yml @@ -32,10 +32,9 @@ jobs: # Label based on modified files - name: Label based on changed files - uses: actions/labeler@v4 + uses: actions/labeler@v5 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - sync-labels: '' # Label based on branch name - uses: actions-ecosystem/action-add-labels@v1 @@ -77,9 +76,9 @@ jobs: issue-number: ${{ github.event.pull_request.number }} body: | Please add one of the following labels to add this contribution to the Release Notes :point_down: - - [bug]([https://github.com/pyansys/pyhps/pulls?q=label%3Abug+) - - 
[documentation]([https://github.com/pyansys/pyhps/pulls?q=label%3Adocumentation+) - - [enhancement]([https://github.com/pyansys/pyhps/pulls?q=label%3Aenhancement+) - - [good first issue]([https://github.com/pyansys/pyhps/pulls?q=label%3Agood+first+issue) - - [maintenance]([https://github.com/pyansys/pyhps/pulls?q=label%3Amaintenance+) - - [release]([https://github.com/pyansys/pyhps/pulls?q=label%3Arelease+) + - [bug]([https://github.com/ansys-internal/pyhps/pulls?q=label%3Abug+) + - [documentation]([https://github.com/ansys-internal/pyhps/pulls?q=label%3Adocumentation+) + - [enhancement]([https://github.com/ansys-internal/pyhps/pulls?q=label%3Aenhancement+) + - [good first issue]([https://github.com/ansys-internal/pyhps/pulls?q=label%3Agood+first+issue) + - [maintenance]([https://github.com/ansys-internal/pyhps/pulls?q=label%3Amaintenance+) + - [release]([https://github.com/ansys-internal/pyhps/pulls?q=label%3Arelease+) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index c324b36c..d77ab39c 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -5,8 +5,9 @@ on: - cron: "0 2 * * *" env: - MAIN_PYTHON_VERSION: '3.9' - DOC_PYTHON_VERSION: '3.9' + MAIN_PYTHON_VERSION: '3.10' + DOCUMENTATION_CNAME: 'hps.docs.pyansys.com' + PACKAGE_NAME: 'ansys-hps-client' concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -28,19 +29,28 @@ jobs: python -m pip install --upgrade pip setuptools tox - name: Test with tox run: tox -e style + + doc-style: + name: Documentation Style Check + runs-on: ubuntu-latest + steps: + - name: PyAnsys documentation style checks + uses: ansys/actions/doc-style@v5 + with: + token: ${{ secrets.GITHUB_TOKEN }} - quick-tests: + tests: name: Quick tests and coverage runs-on: ${{ matrix.os }} strategy: matrix: - # extend to windows once it's possible to run docker - # on windows runners: - # os: [windows-latest, ubuntu-latest] - os: [ubuntu-latest] - cfg: - - {python-version: "3.9", toxenv: "py39"} - - {python-version: "3.11", toxenv: "py311"} + os: [ubuntu-latest] + cfg: [ + { python-version: '3.9', toxenv: 'py39' }, + { python-version: '3.10', toxenv: 'py310' }, + { python-version: '3.11', toxenv: 'py311' }, + { python-version: '3.12', toxenv: 'py312' }, + ] fail-fast: false steps: @@ -67,7 +77,7 @@ jobs: - uses: KengoTODA/actions-setup-docker-compose@main env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - + - name: Login in Github Container registry uses: docker/login-action@v2 with: @@ -88,68 +98,90 @@ jobs: working-directory: ./docker-compose-artifact - name: Test with tox - run: tox -e ${{ matrix.cfg.toxenv }}-noeval-coverage + run: tox -e ${{ matrix.cfg.toxenv }}-noeval-coverage env: REP_TEST_URL: https://localhost:8443/rep REP_TEST_USERNAME: repadmin REP_TEST_PASSWORD: repadmin + - name: "Upload coverage results" + uses: actions/upload-artifact@v4 + if: matrix.cfg.python-version == env.MAIN_PYTHON_VERSION + with: + name: coverage-html + path: .cov/html + retention-days: 7 + - name: Publish Test Report uses: mikepenz/action-junit-report@v3 if: always() with: report_paths: '**/test*.xml' - check_name: Test Report ${{ matrix.os }}:${{ matrix.cfg.python-version }} + check_name: Test Report ${{ matrix.os }}:${{ matrix.cfg.python-version }} detailed_summary: true include_passed: true - + - name: Stop services if: always() run: | docker-compose down -v working-directory: ./docker-compose-artifact/docker-compose - - build: - name: Build wheel - runs-on: ubuntu-latest + + smoke-tests: + name: Build and Smoke tests + 
runs-on: ${{ matrix.os }} + needs: [style] + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ['3.9', '3.10', '3.11', '3.12'] + should-release: + - ${{ github.event_name == 'push' && contains(github.ref, 'refs/tags') }} + exclude: + - should-release: false + os: macos-latest steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 + - name: Build wheelhouse and perform smoke test + uses: ansys/actions/build-wheelhouse@v5 with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - - name: Install dependencies and build the library - run: | - python -m pip install --upgrade pip setuptools - python -m pip install -r requirements/requirements_build.txt - python -m build venv wheel - python -m twine check dist/* - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: pyhps-package - path: | - dist/*.whl - retention-days: 5 + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ${{ matrix.os }} + python-version: ${{ matrix.python-version }} docs: name: Documentation runs-on: ubuntu-latest - # needs: docs-style + needs: [style, doc-style] steps: - uses: actions/checkout@v3 + - name: Set up Python uses: actions/setup-python@v4 with: - python-version: ${{ env.DOC_PYTHON_VERSION }} + python-version: ${{ env.MAIN_PYTHON_VERSION }} + - name: Install dependencies run: | python -m pip install --upgrade pip setuptools tox + + # Necessary for creating PDF + sudo apt-get -y install latexmk + sudo apt install texlive-latex-extra + - name: Generate the documentation with tox run: tox -e doc + - name: Upload HTML Documentation - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: documentation-html - path: build/sphinx/html - retention-days: 5 + path: doc/_build/html + retention-days: 7 + + - name: Upload PDF Documentation + uses: actions/upload-artifact@v4 + with: + name: documentation-pdf + path: doc/_build/latex/*.pdf + retention-days: 7 \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index afff0d69..787cd5a8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,7 +34,7 @@ repos: rev: v0.2.8 hooks: - id: add-license-headers - files: '(ansys|examples|tests)/.*\.(py)|\.(proto)|generate_resources.py|build.py|archive_examples.py' + files: '(ansys|examples|tests)/.*\.(py)|\.(proto)|generate_resources.py|archive_examples.py' # For now we disable some of these checks, can be reenabled later # - repo: https://github.com/pycqa/pydocstyle diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 00000000..dde31397 --- /dev/null +++ b/AUTHORS @@ -0,0 +1,12 @@ +# This is the list of PyHPS's significant contributors. +# +# This file does not necessarily list everyone who has contributed code. +# +# For contributions made under a Corporate CLA, the organization is +# added to this file. +# +# If you have contributed to the repository and wish to be added to this file +# please submit a request. +# +# +ANSYS, Inc. 
\ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 505d21df..8f9d1201 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1 +1,169 @@ -# CHANGELOG \ No newline at end of file +# CHANGELOG + +## [0.5.0](https://github.com/ansys-internal/pyhps/releases/tag/v0.5.0) - December 14 2023 + +### Added +* Expose the resource management service (RMS) API by @FedericoNegri in #247 +* Add missing RMS api doc file by @FedericoNegri in #250 +* Add wheel asset to release by @FedericoNegri in #255 + +### Fixed +* Fix download of file when the evaluation path contains a subdir by @FedericoNegri in #254 +* Fix task def property by @wehrler in #257 + +### Changed +* Update nightly workflow by @FedericoNegri in #256 + +### Dependencies +* Bump datamodel-code-generator from 0.24.1 to 0.25.1 in /requirements by @dependabot in #249 + +## [0.4.0](https://github.com/ansys-internal/pyhps/releases/tag/v0.4.0) - November 17 2023 + +### Added +* Add HPC resources by @wehrler in #186 +* Add PR, bug and feature template by @sashankh01 in #179 +* Expose SSL certificates verification by @FedericoNegri in #235 +* Create File resource with a file-like object by @FedericoNegri in #238 + +### Fixed +* Jnovak/client refresh failure by @Buzz1167 in #194 +* Fix output files and parameter ids in the success criteria by @FedericoNegri in #234 +* Fix marshmallow warnings by @FedericoNegri in #239 + +### Changed +* Update schemas and resources by @FedericoNegri in #196 +* Update tests requiring the auth api by @FedericoNegri in #201 +* Update github workflows by @FedericoNegri in #220 +* Update Python and dependencies versions by @FedericoNegri in #228 +* Remove unnecessary files from wheel by @FedericoNegri in #237 +* Update examples to optionally use the official execution scripts by @saimanikant in #240 + +### Dependencies +* Bump pytest from 7.3.2 to 7.4.3 in /requirements by @dependabot in #192, #232 +* Bump docs requirements by @FedericoNegri in #245 + +## [0.3.0](https://github.com/ansys-internal/pyhps/releases/tag/v0.3.0) - June 29 2023 + +### Added +* Add build_info to evaluator's schema by @FedericoNegri in #190 +* Expose created/modified by by @FedericoNegri in #189 + +### Changed +* Rename external version by @FedericoNegri in #184 +* Rework auth api by @FedericoNegri in #185 +* Bump default Ansys apps version by @FedericoNegri in #187 + +### Dependencies +* Bump ``ansys-sphinx-theme`` from 0.9.8 to 0.9.9 in /requirements by @dependabot in #173 +* Bump ``pytest`` from 7.3.1 to 7.3.2 in /requirements by @dependabot in #188 +* Bump ``pytest-cov`` from 4.0.0 to 4.1.0 in /requirements by @dependabot in #183 + +## [0.2.0](https://github.com/ansys-internal/pyhps/releases/tag/v0.2.0) - June 6 2023 + +### Added +* Objects copy by @FedericoNegri in #181 + +### Fixed +* Fix test: task files by @FedericoNegri in #174 + +### Changed +* Update ci_cd.yml by @FedericoNegri in #175 +* Update nightly build by @FedericoNegri in #176 +* Update ci/cd by @FedericoNegri in #178 +* Adjust schemas to match jms and evaluator by @wehrler in #182 +* Remove file storage from project schema by @wehrler in #160 + +## [0.1.0](https://github.com/ansys-internal/pyhps/releases/tag/v0.1.0) - May 9 2023 + +### Added + +* Add missing fields for app template handling by @nezgrath in https://github.com/pyansys/pyrep/pull/14 +* Okoenig/exec script examples by @ojkoenig in https://github.com/pyansys/pyrep/pull/18 +* Add publish step to private pypi. 
by @jonathanzopes in https://github.com/pyansys/pyrep/pull/20 +* Add log messages in execution scripts by @ojkoenig in https://github.com/pyansys/pyrep/pull/27 +* Add use_execution_script to pyrep by @ojkoenig in https://github.com/pyansys/pyrep/pull/33 +* Add new examples + docstring updates by @FedericoNegri in https://github.com/pyansys/pyrep/pull/35 +* Add version as a cmd line argument in the examples by @FedericoNegri in https://github.com/pyansys/pyrep/pull/71 +* Add nightly build and make CI run faster by @FedericoNegri in https://github.com/pyansys/pyrep/pull/85 +* Add task custom_data field by @FedericoNegri in https://github.com/pyansys/pyrep/pull/143 +* Add release job to ci/cd by @FedericoNegri in https://github.com/pyansys/pyrep/pull/172 +* Common Client and separate API objects by @FedericoNegri in https://github.com/pyansys/pyrep/pull/28 +* Set REP server for CI tests by @FedericoNegri in https://github.com/pyansys/pyrep/pull/39 +* Include examples in the doc by @FedericoNegri in https://github.com/pyansys/pyrep/pull/45 +* Auto generate code for resource objects by @FedericoNegri in https://github.com/pyansys/pyrep/pull/42 +* Fluent example by @FedericoNegri in https://github.com/pyansys/pyrep/pull/46 +* Separate tests by @FedericoNegri in https://github.com/pyansys/pyrep/pull/49 +* Davel/fluent example by @davel94 in https://github.com/pyansys/pyrep/pull/52 +* CFX example by @davel94 in https://github.com/pyansys/pyrep/pull/55 +* doc for fluent nozzle and cfx examples by @davel94 in https://github.com/pyansys/pyrep/pull/58 +* get_project_by_name in mapdl_motorbike_frame example by @saimanikant in https://github.com/pyansys/pyrep/pull/65 +* Don't assume projects exists on server for tests by @FedericoNegri in https://github.com/pyansys/pyrep/pull/86 +* Test task definition fields by @FedericoNegri in https://github.com/pyansys/pyrep/pull/92 +* Auto-generate __init__ arguments for resources by @FedericoNegri in https://github.com/pyansys/pyrep/pull/93 +* Correct dependabot labels by @nezgrath in https://github.com/pyansys/pyrep/pull/101 +* Task definition template permissions by @FedericoNegri in https://github.com/pyansys/pyrep/pull/100 +* Sort File System Rest Gateway by ascending priority by @FedericoNegri in https://github.com/pyansys/pyrep/pull/112 +* Test against local REP deployment by @FedericoNegri in https://github.com/pyansys/pyrep/pull/127 +* Verify that a user can query newly created templates by @FedericoNegri in https://github.com/pyansys/pyrep/pull/118 +* Enable multi-version documentation by @greschd in https://github.com/pyansys/pyrep/pull/130 +* Run unit tests in nightly build against local deployment by @FedericoNegri in https://github.com/pyansys/pyrep/pull/132 +* Clean up old/unused files by @FedericoNegri in https://github.com/pyansys/pyrep/pull/133 +* Expose distributed flag in the task definition and template by @FedericoNegri in https://github.com/pyansys/pyrep/pull/134 +* Expose argument to control the default for the `fields="all"` query parameter by @FedericoNegri in https://github.com/pyansys/pyrep/pull/150 +* Helper function to copy default execution scripts by @FedericoNegri in https://github.com/pyansys/pyrep/pull/154 +* Ansys org rename by @FedericoNegri in https://github.com/pyansys/pyrep/pull/170 +* Expose sync_jobs and deprecate _sync_jobs by @FedericoNegri in https://github.com/pyansys/pyrep/pull/171 + +### Fixed + +* Fix mapdl_motorbike_frame examples. 
To be continued by @ojkoenig in https://github.com/pyansys/pyrep/pull/5 +* Okoenig/fix examples by @ojkoenig in https://github.com/pyansys/pyrep/pull/6 +* Okoenig/fix examples by @ojkoenig in https://github.com/pyansys/pyrep/pull/8 +* Minor fix by @ojkoenig in https://github.com/pyansys/pyrep/pull/9 +* Minor fix for the example as reported by Sorin by @ojkoenig in https://github.com/pyansys/pyrep/pull/12 +* Adjust replace rules by @nezgrath in https://github.com/pyansys/pyrep/pull/13 +* Small fix for task definition schema. execution_script_id is optional by @ojkoenig in https://github.com/pyansys/pyrep/pull/17 +* Pass env correctly by @ojkoenig in https://github.com/pyansys/pyrep/pull/19 +* Jzopes/fix code style by @jonathanzopes in https://github.com/pyansys/pyrep/pull/22 +* okoenig/fix tests by @ojkoenig in https://github.com/pyansys/pyrep/pull/24 +* Fix doc build by @FedericoNegri in https://github.com/pyansys/pyrep/pull/111 +* Fix api update calls by @FedericoNegri in https://github.com/pyansys/pyrep/pull/126 + +### Changed + +* Change required_output_parameters to required_output_parameter_ids by @nezgrath in https://github.com/pyansys/pyrep/pull/2 +* Updating more examples by @ojkoenig in https://github.com/pyansys/pyrep/pull/7 +* Disable pydocstyle for now by @ojkoenig in https://github.com/pyansys/pyrep/pull/11 +* Update doc build step. by @jonathanzopes in https://github.com/pyansys/pyrep/pull/21 +* Minor update for exec scripts by @ojkoenig in https://github.com/pyansys/pyrep/pull/23 +* Doc update + get_project_by_name and pretty print by @FedericoNegri in https://github.com/pyansys/pyrep/pull/36 +* Update task definition templates + doc improvements by @FedericoNegri in https://github.com/pyansys/pyrep/pull/38 +* Update app names by @FedericoNegri in https://github.com/pyansys/pyrep/pull/53 +* Update url of testing server by @FedericoNegri in https://github.com/pyansys/pyrep/pull/59 +* Schema and resource generation updates by @FedericoNegri in https://github.com/pyansys/pyrep/pull/57 +* Update test server credentials by @FedericoNegri in https://github.com/pyansys/pyrep/pull/75 +* Expose update of evaluator configuration by @FedericoNegri in https://github.com/pyansys/pyrep/pull/137 +* Update dependencies by @FedericoNegri in https://github.com/pyansys/pyrep/pull/145 +* Update dependencies by @FedericoNegri in https://github.com/pyansys/pyrep/pull/162 +* Update project copy and evaluator's schema by @FedericoNegri in https://github.com/pyansys/pyrep/pull/165 +* Change source import for flake8 by @RobPasMue in https://github.com/pyansys/pyrep/pull/88 +* changes to exec scripts for templates. 
by @davel94 in https://github.com/pyansys/pyrep/pull/80 +* Specify python-keycloak<=2.12.0 to avoid breaking changes with newer releases by @FedericoNegri in https://github.com/pyansys/pyrep/pull/141 +* Re-enable the Auth API by @FedericoNegri in https://github.com/pyansys/pyrep/pull/32 +* Adjust package name to ansys-rep-client by @FedericoNegri in https://github.com/pyansys/pyrep/pull/138 +* Improve type annotations and APIs doc by @FedericoNegri in https://github.com/pyansys/pyrep/pull/47 + +### Dependencies +* Bump ``sphinx`` from 5.0.2 to 0.10.0 in /requirements by @dependabot in ([#4](https://github.com/pyansys/pyrep/pull/4), [#73](https://github.com/pyansys/pyrep/pull/73), [#109](https://github.com/pyansys/pyrep/pull/109)) +* Bump ``ansys-sphinx-theme`` from 0.4.2 to 0.8.2 in /requirements by @dependabot in ([#16](https://github.com/pyansys/pyrep/pull/16), [#25](https://github.com/pyansys/pyrep/pull/25), [#37](https://github.com/pyansys/pyrep/pull/37), [#91](https://github.com/pyansys/pyrep/pull/91), [#98](https://github.com/pyansys/pyrep/pull/98), [#122](https://github.com/pyansys/pyrep/pull/122), [#129](https://github.com/pyansys/pyrep/pull/129)) +* Bump ``sphinxnotes-strike`` from 1.1 to 1.2 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/34 +* Bump ``pytest`` from 7.1.2 to 7.2.1 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/44, https://github.com/pyansys/pyrep/pull/77, https://github.com/pyansys/pyrep/pull/113 +* Bump ``sphinx-autodoc-typehints`` from 1.18.1 to 1.22 in /requirements by @dependabot in ([#54](https://github.com/pyansys/pyrep/pull/54), [#108](https://github.com/pyansys/pyrep/pull/108), [#123](https://github.com/pyansys/pyrep/pull/123), [#124](https://github.com/pyansys/pyrep/pull/124), [#128](https://github.com/pyansys/pyrep/pull/128) +* Bump dependencies version by @FedericoNegri in https://github.com/pyansys/pyrep/pull/72 +* Bump ``apispec`` from 5.2.2 to 6.0.2 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/74, https://github.com/pyansys/pyrep/pull/87 +* Bump versions in requirements by @FedericoNegri in https://github.com/pyansys/pyrep/pull/84 +* Bump ``sphinx-copybutton`` from 0.5 to 0.5.1 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/89 +* Bump ``sphinxcontrib-httpdomain`` from 1.8.0 to 1.8.1 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/90 +* Bump ``twine`` from 4.0.1 to 4.0.2 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/95 +* Bump ``build`` from 0.9.0 to 0.10.0 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/110 +* Bump ``sphinxcontrib-globalsubs`` from 0.1.0 to 0.1.1 in /requirements by @dependabot in https://github.com/pyansys/pyrep/pull/115 diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 1cf484f1..6705d053 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,18 +1,18 @@ -# Contributor Covenant Code of Conduct +# Contributor covenant code of conduct -## Our Pledge +## Pledge -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our -project and our community a harassment-free experience for everyone, -regardless of age, body size, disability, ethnicity, sex -characteristics, gender identity and expression, level of experience, -education, socio-economic status, nationality, personal appearance, -race, religion, or sexual identity and orientation. 
+In the interest of fostering an open and welcoming environment, +all contributors and maintainers pledge to making participation +in the Ansys project and community a harassment-free experience +for everyone, regardless of age, body size, disability, ethnicity, +sex characteristics, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. -## Our Standards +## Standards -Examples of behavior that contributes to creating a positive environment +Examples of behavior that contribute to creating a positive environment include: * Using welcoming and inclusive language @@ -32,7 +32,7 @@ Examples of unacceptable behavior by participants include: * Other conduct which could reasonably be considered inappropriate in a professional setting -## Our Responsibilities +## Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in @@ -48,18 +48,18 @@ offensive, or harmful. This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail +representing a project or community include using an official project email address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Attribution -This Code of Conduct is adapted from the [Contributor Covenant][homepage], +This code of conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see -https://www.contributor-covenant.org/faq +https://www.contributor-covenant.org/faq \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index deebbad3..bf9cc8aa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,2 +1,7 @@ -# Contributing +# Contributing +Please, refer to the [PyAnsys Developer's Guide] for contributing to this project. + +[PyAnsys Developer's Guide]: https://dev.docs.pyansys.com/index.html + + \ No newline at end of file diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md new file mode 100644 index 00000000..9159c893 --- /dev/null +++ b/CONTRIBUTORS.md @@ -0,0 +1,20 @@ +# Contributors + +## Project Lead or Owner + +* [Federico Negri](https://github.com/FedericoNegri) +* [Oliver Koenig](https://github.com/ojkoenig) + +## Individual Contributors + +* [davel94](https://github.com/davel94) +* [Jonathan Zopes](https://github.com/jonathanzopes) +* [Ryan Wehrle](https://github.com/wehrler) +* [Michal Pawlik](https://github.com/nezgrath) +* [Jon Novak](https://github.com/Buzz1167) +* [Manikanth Sai](https://github.com/saimanikant) +* [Dominik Gresch](https://github.com/greschd) +* [Nathan Sharp](https://github.com/phxnsharp) +* [Sashanka Krishna](https://github.com/sashankh01) +* [Roberto Pastor Muela](https://github.com/RobPasMue) +* [Kerry McAdams](https://github.com/klmcadams) \ No newline at end of file diff --git a/LICENSE b/LICENSE index 94e66d5a..27fe2c1d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,13 +1,13 @@ MIT License -Copyright (c) 2022 PyAnsys +Copyright (c) 2024 ANSYS, Inc. 
and/or its affiliates. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. @@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +SOFTWARE. \ No newline at end of file diff --git a/README.rst b/README.rst index 46368161..94059d58 100644 --- a/README.rst +++ b/README.rst @@ -6,20 +6,20 @@ PyHPS :target: https://docs.pyansys.com/ :alt: PyAnsys -.. |python| image:: https://img.shields.io/badge/Python-%3E%3D3.7-blue - :target: https://pypi.org/project/ansys-rep/ +.. |python| image:: https://img.shields.io/pypi/pyversions/ansys-hps-client?logo=pypi + :target: https://pypi.org/project/ansys-hps-client :alt: Python -.. |pypi| image:: https://img.shields.io/pypi/v/ansys-rep.svg?logo=python&logoColor=white - :target: https://pypi.org/project/ansys-rep +.. |pypi| image:: https://img.shields.io/pypi/v/ansys-hps-client.svg?logo=python&logoColor=white + :target: https://pypi.org/project/ansys-hps-client :alt: PyPI -.. |codecov| image:: https://codecov.io/gh/pyansys/pyhps/branch/main/graph/badge.svg - :target: https://codecov.io/gh/pyansys/pyhps +.. |codecov| image:: https://codecov.io/gh/ansys-internal/pyhps/branch/main/graph/badge.svg + :target: https://codecov.io/gh/ansys-internal/ansys-hps-client :alt: Codecov -.. |GH-CI| image:: https://github.com/pyansys/pyhps/actions/workflows/ci_cd.yml/badge.svg - :target: https://github.com/pyansys/pyhps/actions/workflows/ci_cd.yml +.. |GH-CI| image:: https://github.com/ansys-internal/pyhps/actions/workflows/ci_cd.yml/badge.svg + :target: https://github.com/ansys-internal/pyhps/actions/workflows/ci_cd.yml :alt: GH-CI .. |MIT| image:: https://img.shields.io/badge/License-MIT-yellow.svg @@ -87,4 +87,4 @@ without changing the core behavior or license of the original software. The use of PyHPS requires a legally licensed local copy of AEDT. To get a copy of AEDT, see the `Ansys HPC Platform Services Guide `_` -in the Ansys Help. \ No newline at end of file +in the Ansys Help. diff --git a/ansys/hps/client/rms/models.py b/ansys/hps/client/rms/models.py deleted file mode 100644 index a0c45b96..00000000 --- a/ansys/hps/client/rms/models.py +++ /dev/null @@ -1,709 +0,0 @@ -# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. 
-# SPDX-License-Identifier: MIT -# -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -# generated by datamodel-codegen: -# filename: rms_openapi.json -# timestamp: 2024-01-15T16:13:17+00:00 - -from __future__ import annotations - -from datetime import datetime -from enum import Enum -from typing import Any, Dict, List, Optional, Union - -from pydantic import BaseModel, Field -from typing_extensions import Literal - - -class ApplicationInfo(BaseModel): - name: str = Field(..., description='Application name', title='Name') - version: str = Field(..., description='Application version', title='Version') - install_path: str = Field( - ..., description='Installation path of application', title='Install Path' - ) - executable: str = Field( - ..., description='Executable path to run application', title='Executable' - ) - environment: Optional[Dict[str, Any]] = Field( - None, description='Environment setup for the process', title='Environment' - ) - capabilities: Optional[List[str]] = Field( - None, description='Capabilities of this application', title='Capabilities' - ) - customization_hook: Optional[Dict[str, Any]] = Field( - None, - description='Details of a custom hook used to modify the configuration before runs are performed', - title='Customization Hook', - ) - - -class EvaluatorTaskDirectoryCleanup(Enum): - always = 'always' - on_success = 'on_success' - never = 'never' - - -class ContextUpdate(BaseModel): - custom: Optional[Dict[str, Optional[Union[int, bool, str, float]]]] = Field( - {}, description='Custom runtime properties.', title='Custom' - ) - - -class CrsCountResponse(BaseModel): - num_compute_resource_sets: Optional[int] = Field(0, title='Num Compute Resource Sets') - - -class TaskDirectoryCleanupEnum(Enum): - always = 'always' - on_success = 'on_success' - never = 'never' - - -class EvaluatorRegistration(BaseModel): - id: Optional[str] = Field(None, description='Unique ID for this worker.', title='Id') - name: Optional[str] = Field(None, description='User-defined name for this worker.', title='Name') - last_modified: Optional[datetime] = Field( - None, - description='Date and time when the registration was last modified.', - title='Last Modified', - ) - host_id: Optional[str] = Field( - None, description='Static hardware and configuration-based UUID.', title='Host Id' - ) - host_name: Optional[str] = Field( - None, description='Name of the host that the worker is running on.', title='Host Name' - ) - username: Optional[str] = Field( - None, description='Username that the 
evaluator authenticated with.', title='Username' - ) - platform: Optional[str] = Field( - None, description='OS that the evaluator is running on.', title='Platform' - ) - build_info: Optional[Dict[str, Any]] = Field( - {}, description='Detailed build information.', title='Build Info' - ) - compute_resource_set_id: Optional[str] = Field( - None, - description='ID of the compute resource set that the evaluator belongs to.', - title='Compute Resource Set Id', - ) - change_requested: Optional[datetime] = Field( - None, - description="Date and time of the configuration's last modification request.", - title='Change Requested', - ) - - -class EvaluatorsCountResponse(BaseModel): - num_evaluators: Optional[int] = Field(0, title='Num Evaluators') - - -class EvaluatorsRequest(BaseModel): - evaluators: List[EvaluatorRegistration] = Field( - ..., description='Evaluator details', title='Evaluators' - ) - - -class EvaluatorsResponse(BaseModel): - evaluators: List[EvaluatorRegistration] = Field( - ..., description='Evaluator details', title='Evaluators' - ) - - -class HpcResources(BaseModel): - num_cores_per_node: Optional[int] = Field( - None, description='Number of cores per node.', title='Num Cores Per Node' - ) - num_gpus_per_node: Optional[int] = Field( - None, description='Number of GPUs per node.', title='Num Gpus Per Node' - ) - exclusive: Optional[bool] = Field( - None, description='To not share nodes with other running jobs.', title='Exclusive' - ) - queue: Optional[str] = Field(None, description="Scheduler's queue.", title='Queue') - - -class KubernetesKedaBackend(BaseModel): - plugin_name: Literal['kubernetes'] = Field(..., title='Plugin Name') - debug: Optional[bool] = Field( - False, description='Enable additional debugging of the backend', title='Debug' - ) - job_script_template_path: Optional[str] = Field( - None, - description='Path to the job script template to use in the backend', - title='Job Script Template Path', - ) - working_dir: Optional[str] = Field( - None, description='Working directory to use in the backend', title='Working Dir' - ) - env: Optional[Dict[str, Any]] = Field( - None, description='Static environment variables needed for job execution', title='Env' - ) - cpu_limit: Optional[str] = Field( - '1.0', description='CPU limit applied to each evaluator instance', title='Cpu Limit' - ) - memory_limit: Optional[str] = Field( - '250M', description='Memory limit applied to each evaluator instance', title='Memory Limit' - ) - namespace: Optional[str] = Field( - 'default', - description='Kubernetes namespace to use to scale evaluators', - title='Namespace', - ) - target_resource_kind: Optional[str] = Field( - 'job', - description='Kubernetes resource kind that REP scaler should scale, should be one of job, deployment, statefulset', - title='Target Resource Kind', - ) - - -class KubernetesResourceScaling(BaseModel): - plugin_name: Literal['kubernetes_resource_scaling'] = Field(..., title='Plugin Name') - target_resource_kind: Optional[str] = Field( - 'job', - description='Kubernetes resource kind that REP scaler should scale, should be one of job, deployment, statefulset', - title='Target Resource Kind', - ) - - -class LocalBackend(BaseModel): - plugin_name: Literal['local'] = Field(..., title='Plugin Name') - debug: Optional[bool] = Field( - False, description='Enable additional debugging of the backend', title='Debug' - ) - working_dir: Optional[str] = Field( - None, description='Working directory to use in the backend', title='Working Dir' - ) - env: Optional[Dict[str, Any]] 
= Field( - None, description='Static environment variables needed for job execution', title='Env' - ) - - -class Machine(BaseModel): - name: str = Field(..., description='Name of the machine', title='Name') - num_cores: int = Field(..., description='Number of cores available', title='Num Cores') - - -class MaxAvailableResourceScaling(BaseModel): - plugin_name: Literal['max_available_resource_scaling'] = Field(..., title='Plugin Name') - match_all_requirements: Optional[bool] = Field( - False, - description='Whether scaling should work with available resource properties specified in compute resource set (default) or require a match of all requirements of the task definition.', - title='Match All Requirements', - ) - - -class MockupBackend(BaseModel): - plugin_name: Literal['mockup'] = Field(..., title='Plugin Name') - debug: Optional[bool] = Field( - False, description='Enable additional debugging of the backend', title='Debug' - ) - - -class Node(BaseModel): - name: Optional[str] = Field(None, description='Node name', title='Name') - total_memory_mb: Optional[int] = Field(..., description='Total memory', title='Total Memory Mb') - total_cores: Optional[int] = Field(..., description='Number of cores', title='Total Cores') - additional_props: Optional[Dict[str, Any]] = Field({}, title='Additional Props') - - -class NodeGroup(BaseModel): - node_names: List[str] = Field(..., title='Node Names') - memory_per_node_mb: Optional[int] = Field( - ..., description='Total Memory per node', title='Memory Per Node Mb' - ) - cores_per_node: Optional[int] = Field( - ..., description='Total cores per node', title='Cores Per Node' - ) - - -class PlatformEnum(Enum): - windows = 'windows' - linux = 'linux' - darwin = 'darwin' - - -class ProblemDetail(BaseModel): - type: Optional[str] = Field(None, title='Type') - title: Optional[str] = Field(None, title='Title') - status: int = Field(..., title='Status') - detail: str = Field(..., title='Detail') - instance: Optional[str] = Field(None, title='Instance') - - -class ProcessLauncherProcessRunner(BaseModel): - plugin_name: Literal['process_launcher_module'] = Field(..., title='Plugin Name') - default_user: Optional[str] = Field( - None, description='The user to use when none is specified', title='Default User' - ) - timeout: Optional[int] = Field( - 30, description='Timeout in seconds before the request is aborted.', title='Timeout' - ) - allowed_users: Optional[List[str]] = Field( - None, description='Users allowed to launch processes', title='Allowed Users' - ) - disallowed_users: Optional[List[str]] = Field( - ['root'], description='Users not allowed to launch processes', title='Disallowed Users' - ) - user_mapping: Optional[Dict[str, str]] = Field( - {}, description='Map of calling user to system user', title='User Mapping' - ) - minimum_uid: Optional[int] = Field( - 1000, description='Minimum UID of users allowed to launch processes', title='Minimum Uid' - ) - minimum_gid: Optional[int] = Field( - 1000, description='Minimum GID of users allowed to launch processes', title='Minimum Gid' - ) - - -class Queue(BaseModel): - name: Optional[str] = Field(None, description='Queue name', title='Name') - node_groups: Optional[List[NodeGroup]] = Field( - None, - description='List of node groups associated with the queue (if available)', - title='Node Groups', - ) - additional_props: Optional[Dict[str, Any]] = Field({}, title='Additional Props') - - -class Resources(BaseModel): - num_cores: Optional[int] = Field(None, description='Number of cores', title='Num Cores') - 
platform: Optional[PlatformEnum] = Field( - None, description="Basic platform information: 'windows' or 'linux'" - ) - memory: Optional[int] = Field(None, description='Amount of RAM in bytes', title='Memory') - disk_space: Optional[int] = Field( - None, description='Amount of disk space in bytes', title='Disk Space' - ) - custom: Optional[Dict[str, Optional[Union[bool, int, str, float]]]] = Field( - {}, description='Custom resource properties.', title='Custom' - ) - num_instances: Optional[int] = Field( - None, - description='Number of instances/jobs that can be created on the compute resource set', - title='Num Instances', - ) - - -class RestLauncherProcessRunner(BaseModel): - plugin_name: Literal['process_launcher_service'] = Field(..., title='Plugin Name') - launcher_url: Optional[str] = Field( - 'http://localhost:4911', - description='URL to use when none is specified', - title='Launcher Url', - ) - verify_ssl: Optional[bool] = Field( - True, description='Check the SSL certificate for HTTPS launchers', title='Verify Ssl' - ) - timeout: Optional[int] = Field( - 30, description='Timeout in seconds before the request is aborted.', title='Timeout' - ) - shell: Optional[bool] = Field( - True, description='Enable the shell interpretation on subprocess run', title='Shell' - ) - - -class ScalerApplicationInfo(BaseModel): - name: str = Field(..., description='Application name', title='Name') - version: str = Field(..., description='Application version', title='Version') - install_path: str = Field( - ..., description='Installation path of application', title='Install Path' - ) - executable: str = Field( - ..., description='Executable path to run application', title='Executable' - ) - environment: Optional[Dict[str, Any]] = Field( - None, description='Environment setup for the process', title='Environment' - ) - capabilities: Optional[List[str]] = Field( - None, description='Capabilities of the application', title='Capabilities' - ) - customization_hook: Optional[Dict[str, Any]] = Field( - None, - description='Details of a custom hook used to modify the configuration before runs are performed', - title='Customization Hook', - ) - resource_name: Optional[str] = Field( - None, - description='Kubernetes object (deployment/statefulset) name to use as target resource by keda', - title='Resource Name', - ) - scaling_max_eval_instances: Optional[int] = Field( - 1, - description='Maximum number of instances that can be created when scaling up', - title='Scaling Max Eval Instances', - ) - scaling_min_eval_instances: Optional[int] = Field( - 0, - description='Minimum number of instances than can be terminated when scaling down', - title='Scaling Min Eval Instances', - ) - scaling_threshold: Optional[int] = Field( - 1, - description='Threshold value to determine when Kubernetes deployments should be scaled up or down', - title='Scaling Threshold', - ) - cool_down_period: Optional[int] = Field( - 60, - description='Period to wait after the last trigger reported active before scaling the resource back to 0', - title='Cool Down Period', - ) - - -class ScalerRegistration(BaseModel): - id: Optional[str] = Field(None, description='Unique ID for this worker', title='Id') - name: Optional[str] = Field(None, description='User-defined name for this worker.', title='Name') - last_modified: Optional[datetime] = Field( - None, - description='Date and time when the registration was last modified.', - title='Last Modified', - ) - host_id: Optional[str] = Field( - None, description='Static hardware and configuration-based 
UUID.', title='Host Id' - ) - host_name: Optional[str] = Field( - None, description='Name of the host on which the worker is running', title='Host Name' - ) - username: Optional[str] = Field( - None, description='Username that the evaluator authenticated with.', title='Username' - ) - platform: Optional[str] = Field( - None, description='OS that the evaluator is running on.', title='Platform' - ) - build_info: Optional[Dict[str, Any]] = Field( - {}, description='Detailed build information.', title='Build Info' - ) - config_modified: Optional[datetime] = Field( - None, - description="Date and time of the configuration's last modification", - title='Config Modified', - ) - - -class ScalersCountResponse(BaseModel): - num_scalers: Optional[int] = Field(0, title='Num Scalers') - - -class ScalersRequest(BaseModel): - scalers: List[ScalerRegistration] = Field(..., description='Scaler details', title='Scalers') - - -class ScalersResponse(BaseModel): - scalers: List[ScalerRegistration] = Field(..., description='Scaler details', title='Scalers') - - -class ServiceUserProcessRunner(BaseModel): - plugin_name: Literal['service_user_module'] = Field(..., title='Plugin Name') - - -class Status(BaseModel): - time: str = Field(..., title='Time') - build: Dict[str, Any] = Field(..., title='Build') - - -class ClusterInfo(BaseModel): - id: Optional[str] = Field(None, description='Unique ID for database', title='Id') - crs_id: Optional[str] = Field(None, description='Compute resource set ID', title='Crs Id') - name: Optional[str] = Field(None, description='Cluster name', title='Name') - queues: Optional[List[Queue]] = Field([], title='Queues') - nodes: Optional[List[Node]] = Field([], title='Nodes') - additional_props: Optional[Dict[str, Dict[str, Any]]] = Field({}, title='Additional Props') - - -class Context(BaseModel): - custom: Optional[Dict[str, Optional[Union[int, bool, str, float]]]] = Field( - {}, description='Custom runtime properties.', title='Custom' - ) - machines_list: Optional[List[Machine]] = Field( - None, - description='List of machines for distributed parallel processing.', - title='Machines List', - ) - - -class EvaluatorResources(BaseModel): - num_cores: Optional[int] = Field(None, description='Number of cores', title='Num Cores') - platform: Optional[PlatformEnum] = Field( - None, description="Basic platform information: 'windows' or 'linux'" - ) - memory: Optional[int] = Field(None, description='Amount of RAM in bytes', title='Memory') - disk_space: Optional[int] = Field( - None, description='Amount of disk space in bytes', title='Disk Space' - ) - custom: Optional[Dict[str, Optional[Union[bool, int, str, float]]]] = Field( - {}, description='Custom resource properties.', title='Custom' - ) - hpc_resources: Optional[HpcResources] = None - - -class OrchestrationInterfacesBackend(BaseModel): - plugin_name: Literal['orchestration_interfaces'] = Field(..., title='Plugin Name') - debug: Optional[bool] = Field( - False, description='Enable additional debugging of the backend', title='Debug' - ) - scheduler_type: Optional[str] = Field( - 'slurm', - description='Job scheduler type to use in the backend (slurm, pbs, uge...)', - title='Scheduler Type', - ) - scheduler_queue_default: Optional[str] = Field( - None, - description='Job scheduler queue to use for submission', - title='Scheduler Queue Default', - ) - scheduler_command_override: Optional[str] = Field( - None, - description='Path to the JSON file with custom scheduler command definitions', - title='Scheduler Command Override', - ) - 
scheduler_script_override: Optional[str] = Field( - None, - description='Path to the shell script to template for the scheduler', - title='Scheduler Script Override', - ) - exclusive_default: Optional[bool] = Field( - False, - description='Request the scheduler to hold the nodes exclusively for one request', - title='Exclusive Default', - ) - distributed_default: Optional[bool] = Field( - True, - description='Allow the scheduler to provide multiple machines to fulfill the request', - title='Distributed Default', - ) - num_cores_default: Optional[int] = Field( - 1, - description='Number of cores to request from the scheduler for a task', - title='Num Cores Default', - ) - working_dir: Optional[str] = Field( - None, description='Working directory to use in the backend', title='Working Dir' - ) - env: Optional[Dict[str, Any]] = Field( - None, description='Static environment variables needed for job execution', title='Env' - ) - process_runner: Optional[ - Union[ServiceUserProcessRunner, ProcessLauncherProcessRunner, RestLauncherProcessRunner] - ] = Field( - {'plugin_name': 'service_user_module'}, - description='Process runner used to execute commands', - discriminator='plugin_name', - title='Process Runner', - ) - create_workdir: Optional[bool] = Field( - True, - description='Create base and/or user-specific working directories at runtime', - title='Create Workdir', - ) - use_templates: Optional[bool] = Field( - True, - description='Use the templated versions of the scripts and write them to the working directory', - title='Use Templates', - ) - - -class ComputeResourceSet(BaseModel): - name: Optional[str] = Field( - 'default', description='Name of the compute resource set.', title='Name' - ) - id: Optional[str] = Field(None, description='Unique ID for this set.', title='Id') - scaler_id: Optional[str] = Field( - None, description='Temporary. To be removed after transitioning to ``client_id``.', title='Scaler Id' - ) - last_modified: Optional[datetime] = Field( - None, description='Last modified time.', title='Last Modified' - ) - backend: Optional[ - Union[KubernetesKedaBackend, OrchestrationInterfacesBackend, LocalBackend, MockupBackend] - ] = Field( - {'debug': False, 'plugin_name': 'local'}, - description='Backend to use in this compute resource set.', - discriminator='plugin_name', - title='Backend', - ) - scaling_strategy: Optional[Union[MaxAvailableResourceScaling, KubernetesResourceScaling]] = ( - Field( - {'match_all_requirements': False, 'plugin_name': 'max_available_resource_scaling'}, - description='Scaling strategy to use in this compute resource set.', - discriminator='plugin_name', - title='Scaling Strategy', - ) - ) - available_resources: Optional[Resources] = Field( - {'custom': {}}, description='Available resources in the compute resource set.' 
- ) - available_applications: Optional[List[ScalerApplicationInfo]] = Field( - [], description='List of available applications.', title='Available Applications' - ) - evaluator_requirements_matching: Optional[bool] = Field( - False, - description='Whether the evaluators should do matching of resource and software requirements.', - title='Evaluator Requirements Matching', - ) - evaluator_task_directory_cleanup: Optional[EvaluatorTaskDirectoryCleanup] = Field( - 'always', - description='Cleanup policy for task directories that are passed to evaluators.', - title='Evaluator Task Directory Cleanup', - ) - evaluator_auto_shutdown_time: Optional[int] = Field( - 20, - description='Time after which to shut down the evaluator if not running any jobs.', - title='Evaluator Auto Shutdown Time', - ) - evaluator_loop_interval: Optional[int] = Field( - 5, - description='Main evaluator loop is repeated every ``loop_interval`` seconds.', - title='Evaluator Loop Interval', - ) - - -class ComputeResourceSetsRequest(BaseModel): - compute_resource_sets: List[ComputeResourceSet] = Field( - ..., description='Compute resource set details', title='Compute Resource Sets' - ) - - -class ComputeResourceSetsResponse(BaseModel): - compute_resource_sets: List[ComputeResourceSet] = Field( - ..., description='Compute resource set details', title='Compute Resource Sets' - ) - - -class EvaluatorConfiguration(BaseModel): - id: Optional[str] = Field(None, description='Unique DB ID (read-only)', title='Id') - evaluator_id: Optional[str] = Field( - None, description='ID of the parent evaluator (read-only).', title='Evaluator Id' - ) - last_modified: Optional[datetime] = Field( - None, description='Last modified time.', title='Last Modified' - ) - working_directory: Optional[str] = Field(None, title='Working Directory') - local_file_cache_max_size: Optional[int] = Field( - None, - description='Maximum allowed cache size in bytes or ``None``.', - title='Local File Cache Max Size', - ) - max_num_parallel_tasks: Optional[int] = Field(None, title='Max Num Parallel Tasks') - task_directory_cleanup: Optional[TaskDirectoryCleanupEnum] = Field( - None, title='Task Directory Cleanup' - ) - resources: Optional[EvaluatorResources] = {'custom': {}} - task_manager_type: Optional[str] = Field(None, title='Task Manager Type') - loop_interval: Optional[float] = Field( - 5.0, - description='Main evaluator loop is repeated every ``loop_interval`` seconds.', - title='Loop Interval', - ) - local_file_cache: Optional[bool] = Field( - True, - description='Whether to configure a local file cache in the file tool.', - title='Local File Cache', - ) - applications: Optional[List[ApplicationInfo]] = Field( - [], description='List of available applications.', title='Applications' - ) - project_server_select: Optional[bool] = Field( - True, - description='Get project assignments from the server instead of using the locally set values.', - title='Project Server Select', - ) - project_list: Optional[List[str]] = Field( - [], - description='IDs of the projects that the evaluator should work on in order.', - title='Project List', - ) - project_assignment_mode: Optional[str] = Field( - 'all_active', - description='How the evaluator is to select projects to work on. Options are ``all_active``, ``disabled``, and ``list``.', - title='Project Assignment Mode', - ) - context: Optional[Context] = Field( - {'custom': {}}, description='Runtime properties to pass to executed tasks.' 
- ) - - -class EvaluatorConfigurationUpdate(BaseModel): - id: Optional[str] = Field(None, description='Unique DB ID (read-only)', title='Id') - evaluator_id: Optional[str] = Field( - None, description='ID of the parent evaluator (read-only).', title='Evaluator Id' - ) - last_modified: Optional[datetime] = Field( - None, description='Last modified time.', title='Last Modified' - ) - working_directory: Optional[str] = Field(None, title='Working Directory') - local_file_cache_max_size: Optional[int] = Field( - None, - description='Maximum allowed cache size in bytes or ``None``.', - title='Local File Cache Max Size', - ) - max_num_parallel_tasks: Optional[int] = Field(None, title='Max Num Parallel Tasks') - task_directory_cleanup: Optional[TaskDirectoryCleanupEnum] = Field( - None, title='Task Directory Cleanup' - ) - resources: Optional[EvaluatorResources] = {'custom': {}} - name: Optional[str] = Field( - None, description='Update the name of the evaluator (updating the registration).', title='Name' - ) - loop_interval: Optional[float] = Field( - None, - description='Main evaluator loop is repeated every ``loop_interval`` seconds.', - title='Loop Interval', - ) - local_file_cache: Optional[bool] = Field( - None, - description='Whether to configure a local file cache in the file tool.', - title='Local File Cache', - ) - applications: Optional[List[ApplicationInfo]] = Field( - [], description='List of available applications.', title='Applications' - ) - project_list: Optional[List[str]] = Field( - None, - description='IDs of the projects that the evaluator should work on in order.', - title='Project List', - ) - project_assignment_mode: Optional[str] = Field( - None, - description='How the evaluator selects projects to work on. One of: disabled, all_active, list', - title='Project Assignment Mode', - ) - context: Optional[ContextUpdate] = Field( - {'custom': {}}, description='Runtime properties to pass to executed tasks.' - ) - - -class EvaluatorConfigurationUpdatesRequest(BaseModel): - configuration_updates: List[EvaluatorConfigurationUpdate] = Field( - ..., description='Configuration update details', title='Configuration Updates' - ) - - -class EvaluatorConfigurationUpdatesResponse(BaseModel): - configuration_updates: List[EvaluatorConfigurationUpdate] = Field( - ..., description='Configuration update details', title='Configuration Updates' - ) - - -class EvaluatorConfigurationsResponse(BaseModel): - configurations: List[EvaluatorConfiguration] = Field( - ..., description='Evaluator configurations', title='Configurations' - ) diff --git a/build.py b/build.py deleted file mode 100644 index 2514e5d8..00000000 --- a/build.py +++ /dev/null @@ -1,202 +0,0 @@ -# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. -# SPDX-License-Identifier: MIT -# -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -import argparse -import logging -import os -import subprocess -import sys - -log = logging.getLogger(__name__) - -file_formatter = logging.Formatter("[%(asctime)s/%(levelname)5.5s] %(message)s") -stream_formatter = logging.Formatter( - "[%(asctime)s/%(levelname)5.5s] %(message)s", datefmt="%H:%M:%S" -) -file_handler = logging.FileHandler("bootstrap.log") -file_handler.setFormatter(file_formatter) -file_handler.setLevel(logging.DEBUG) - -stream_handler = logging.StreamHandler() -stream_handler.setFormatter(stream_formatter) -stream_handler.setLevel(logging.DEBUG) - -log.addHandler(file_handler) -log.addHandler(stream_handler) -log.setLevel(logging.DEBUG) - - -class Context(object): - pip_index_url = "https://pypi.python.org/simple" - pip_trusted_host = "pypi.python.org" - - def __init__(self, args): - self.args = args - self.venv_name = args.venv_name - self.using_docker = os.environ.get("DEFAULT_DOCKCROSS_IMAGE", None) is not None - - if sys.platform == "win32": - system_python_binary = f'"{sys.executable}"' - python_binary = os.path.join(self.args.venv_name, "Scripts", "python.exe") - else: - if self.using_docker: - # When running inside of our dockcross container python3 is actually Ansys python - log.info("Running inside dockcross container") - system_python_binary = "python3" - else: - system_python_binary = sys.executable - python_binary = os.path.join(self.args.venv_name, "bin", "python") - - self.system_python_binary = system_python_binary - self.python_binary = os.path.abspath(python_binary) - log.debug(f"Python at: {self.system_python_binary}") - log.debug(f"Virtual Env Python at: {self.python_binary}") - - -def _do_venv(context): - log.info("### Preparing venv %s" % context.venv_name) - - subprocess.run( - f"{context.system_python_binary} -m venv {context.venv_name}", shell=True, check=True - ) - - pip_conf_path = ( - os.path.join(context.venv_name, "pip.ini") - if sys.platform == "win32" - else os.path.join(context.venv_name, "pip.conf") - ) - - pip_conf = "[global]" - pip_conf += "\ntimeout = 60" - pip_conf += "\nindex-url = " + context.pip_index_url - pip_conf += "\ntrusted-host = " + context.pip_trusted_host - - log.debug("Writing %s" % pip_conf_path) - with open(pip_conf_path, "w") as f: - f.write(pip_conf) - - log.info("### Installing base modules") - - pip_options = [] - if context.args.verbose: - pip_options.append(" -v") - - log.info("### Updating pip") - subprocess.run(f"{context.python_binary} -m pip install --upgrade pip", shell=True, check=True) - - # Install requirements - build_reqs = os.path.join(os.path.dirname(__file__), "requirements", "requirements_build.txt") - subprocess.run( - f"{context.python_binary} -m pip install -r {build_reqs}", shell=True, check=True - ) - test_reqs = os.path.join(os.path.dirname(__file__), "requirements", "requirements_tests.txt") - subprocess.run(f"{context.python_binary} -m pip install -r {test_reqs}", shell=True, check=True) - - # Install client - subprocess.run(f"{context.python_binary} -m pip install -e .", shell=True, check=True) - - -def 
_do_wheel(context): - log.info("### Build python client wheel") - subprocess.run(f"{context.python_binary} setup.py sdist bdist_wheel", shell=True, check=True) - - -def _do_documentation(context): - log.info("### Preparing python client documentation") - - docs_directory = os.path.join(os.path.dirname(__file__), "doc", "source") - target_directory = os.path.join(os.path.dirname(__file__), "build", "sphinx", "html") - doc_reqs = os.path.join(os.path.dirname(__file__), "requirements", "requirements_doc.txt") - subprocess.run(f"{context.python_binary} -m pip install -r {doc_reqs}", shell=True, check=True) - subprocess.run(f"{context.python_binary} archive_examples.py", shell=True, check=True) - subprocess.run( - f"{context.python_binary} -m sphinx -b html {docs_directory} {target_directory}", - shell=True, - check=True, - ) - - -def _run_tests(context): - cmd = ( - f"{context.python_binary} -m pytest -v --junitxml test_results.xml " - + "--cov=ansys --cov-report=xml --cov-report=html" - ) - subprocess.run(f"{cmd}", shell=True, check=True) - - -steps = [ - ("venv", _do_venv), - ("wheel", _do_wheel), - ("documentation", _do_documentation), - ("tests", _run_tests), -] - - -def main(context): - steps_to_run = set(context.args.selected_steps) - steps_to_run.difference_update(context.args.disabled_steps) - - ordered_steps_to_run = [v[0] for v in steps if v[0] in steps_to_run] - log.debug("Running steps: %s" % ", ".join(ordered_steps_to_run)) - for step_name, step_impl in steps: - if step_name not in steps_to_run: - continue - log.info(f"Running step: {step_name}") - step_impl(context) - - log.info(f"All done!") - - if "venv" in steps_to_run: - log.info("Remember to activate the venv ...") - - -if __name__ == "__main__": - step_names = [v[0] for v in steps] - - parser = argparse.ArgumentParser(description="Build") - parser.add_argument( - "selected_steps", - nargs="*", - default="all", - choices=step_names + ["all"], - help="Steps selected to run", - ) - parser.add_argument( - "-n", - "--no", - dest="disabled_steps", - action="append", - help="Disable selected steps", - default=[], - choices=step_names, - ) - parser.add_argument("-V", "--venv-name", default="dev_env", help="Name of venv to create") - parser.add_argument("-v", "--verbose", action="store_true", help="Increase verbosity") - - args = parser.parse_args() - - if args.selected_steps == "all": - args.selected_steps = list(step_names) - - context = Context(args) - - main(context) diff --git a/doc/.vale.ini b/doc/.vale.ini index da32d0e7..933990bc 100644 --- a/doc/.vale.ini +++ b/doc/.vale.ini @@ -24,6 +24,9 @@ Vocab = ANSYS [*.{md,rst}] +# By default, `class`, `func`, and `exc` are ignored +TokenIgnores = (:class:`.*`|:func:`.*`|:exc:`.*`) + # Apply the following styles BasedOnStyles = Vale, Google diff --git a/doc/Makefile b/doc/Makefile index d5066f42..6b2d948a 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -13,14 +13,26 @@ help: .PHONY: help Makefile +# Install dependencies for sphinx-autoapi +.install-deps: + @pip freeze | grep -q "sphinx-autoapi @ git+https://github.com/ansys/sphinx-autoapi" && is_custom_sphinx_autoapi_installed="yes" || is_custom_sphinx_autoapi_installed="no" + @if [ "$$is_custom_sphinx_autoapi_installed" != "yes" ]; then \ + pip uninstall --yes sphinx-autoapi; \ + pip install "sphinx-autoapi @ git+https://github.com/ansys/sphinx-autoapi@feat/single-page-stable"; \ + fi + # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile
+%: .install-deps Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
 
+pdf: .install-deps
+	@$(SPHINXBUILD) -M latex "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+	cd $(BUILDDIR)/latex && latexmk -r latexmkrc -pdf *.tex -interaction=nonstopmode || true
+	(test -f $(BUILDDIR)/latex/*.pdf && echo pdf exists) || exit 1
+
 # Customized clean due to examples gallery
-clean:
+clean: .install-deps
	rm -rf $(BUILDDIR)/*
	rm -rf $(SOURCEDIR)/examples
	find . -type d -name "_autosummary" -exec rm -rf {} +
diff --git a/doc/make.bat b/doc/make.bat
index fbf40050..e64ac6b3 100644
--- a/doc/make.bat
+++ b/doc/make.bat
@@ -10,6 +10,13 @@ if "%SPHINXBUILD%" == "" (
 set SOURCEDIR=source
 set BUILDDIR=_build
 
+REM TODO: these lines of code should be removed once the feature branch is merged
+for /f %%i in ('pip freeze ^| findstr /c:"sphinx-autoapi @ git+https://github.com/ansys/sphinx-autoapi"') do set is_custom_sphinx_autoapi_installed=%%i
+if NOT "%is_custom_sphinx_autoapi_installed%" == "sphinx-autoapi" (
+    pip uninstall --yes sphinx-autoapi
+    pip install "sphinx-autoapi @ git+https://github.com/ansys/sphinx-autoapi@feat/single-page-stable")
+REM TODO: these lines of code should be removed once the feature branch is merged
+
 if "%1" == "" goto help
 if "%1" == "clean" goto clean
 
@@ -29,6 +36,11 @@ if errorlevel 9009 (
 %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
 goto end
 
+:pdf
+	%SPHINXBUILD% -M latex %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+	cd "%BUILDDIR%\latex"
+	pdflatex *.tex --interaction=nonstopmode
+
 :clean
 rmdir /s /q %BUILDDIR% > /NUL 2>&1
 for /d /r %SOURCEDIR% %%d in (_autosummary) do @if exist "%%d" rmdir /s /q "%%d"
diff --git a/doc/source/api/auth.rst b/doc/source/api/auth.rst
index 594bd6fc..f9722c99 100644
--- a/doc/source/api/auth.rst
+++ b/doc/source/api/auth.rst
@@ -1,5 +1,5 @@
-Authentication Service
-===========================
+Authentication service
+======================
 
 `Keycloak `_ is used for identity and access management. This open source solution
 provides a variety of options for authentication and authorization. Users authenticate
diff --git a/doc/source/api/jms.rst b/doc/source/api/jms.rst
index 929c9164..a124d0e0 100644
--- a/doc/source/api/jms.rst
+++ b/doc/source/api/jms.rst
@@ -124,7 +124,7 @@ Design exploration algorithm
 
 
 Evaluator
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^
 
 .. autoclass:: ansys.hps.client.jms.Evaluator
    :members:
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 4f273fd4..09a1d8d0 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -1,12 +1,17 @@
 # Sphinx documentation configuration file
 from datetime import datetime
 import os
+from pathlib import Path
 import sys
 
-from ansys_sphinx_theme import get_version_match
-from ansys_sphinx_theme import pyansys_logo_black as logo
+from ansys_sphinx_theme import (
+    ansys_favicon,
+    get_autoapi_templates_dir_relative_path,
+    get_version_match,
+    pyansys_logo_black,
+)
 
-from ansys.hps.client import __ansys_apps_version__, __company__, __version__, __version_no_dots__
+from ansys.hps.client import __ansys_apps_version__, __version__
 
 sys.path.append(os.path.abspath(os.path.dirname(__file__)))
 
@@ -15,9 +20,9 @@
 # General information about the project.
 project = "Ansys pyhps"
 copyright = f"(c) {datetime.now().year} ANSYS, Inc. All rights reserved"
-author = __company__
-
-cname = os.getenv("DOCUMENTATION_CNAME", "rep.docs.pyansys.com")
+author = "ANSYS, Inc."
+cname = os.getenv("DOCUMENTATION_CNAME", "hps.docs.pyansys.com") +switcher_version = get_version_match(__version__) """The canonical name of the webpage hosting the documentation.""" # The short X.Y version @@ -32,21 +37,68 @@ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.extlinks", - # "sphinx.ext.viewcode", # to show python source code - "sphinxcontrib.httpdomain", - "sphinxcontrib.globalsubs", + "autoapi.extension", + "sphinx_autodoc_typehints", + "numpydoc", "sphinx.ext.intersphinx", "sphinx_copybutton", - "sphinxnotes.strike", - "sphinx_autodoc_typehints", - "sphinxcontrib.autodoc_pydantic", + "sphinx_design", ] +exclude_patterns = ["_autoapi_templates", "_build", "Thumbs.db", ".DS_Store"] + +# Configuration for Sphinx autoapi +autoapi_type = "python" +autoapi_dirs = ["../../src/ansys"] +autoapi_root = "api" +autoapi_options = [ + "members", + "undoc-members", + "show-inheritance", + "show-module-summary", + "special-members", +] +autoapi_template_dir = get_autoapi_templates_dir_relative_path(Path(__file__)) +suppress_warnings = ["autoapi.python_import_resolution"] +autoapi_python_use_implicit_namespaces = True +autoapi_keep_files = True +autoapi_render_in_single_page = ["class", "enum", "exception"] + +# Intersphinx mapping +intersphinx_mapping = { + "python": ("https://docs.python.org/3.11", None), + "numpy": ("https://numpy.org/doc/stable", None), + "scipy": ("https://docs.scipy.org/doc/scipy/", None), + "pyvista": ("https://docs.pyvista.org/version/stable", None), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "pint": ("https://pint.readthedocs.io/en/stable", None), + "beartype": ("https://beartype.readthedocs.io/en/stable/", None), + "docker": ("https://docker-py.readthedocs.io/en/stable/", None), + "pypim": ("https://pypim.docs.pyansys.com/version/stable", None), + "ansys.hps.client": (f"https://hps.docs.pyansys.com/version/{switcher_version}", None), +} + +# numpydoc configuration +numpydoc_show_class_members = False +numpydoc_xref_param_type = True + +# Consider enabling numpydoc validation. See: +# https://numpydoc.readthedocs.io/en/latest/validation.html# +numpydoc_validate = True +numpydoc_validation_checks = { + "GL06", # Found unknown section + "GL07", # Sections are in the wrong order. + # "GL08", # The object does not have a docstring + "GL09", # Deprecation warning should precede extended summary + "GL10", # reST directives {directives} must be followed by two colons + "SS01", # No summary found + "SS02", # Summary does not start with a capital letter + # "SS03", # Summary does not end with a period + "SS04", # Summary contains heading whitespaces + # "SS05", # Summary must start with infinitive verb, not third person + "RT02", # The first line of the Returns section should contain only the + # type, unless multiple values are being returned" +} # autodoc/autosummary flags autoclass_content = "both" @@ -54,6 +106,19 @@ autosummary_generate = True +def prepare_jinja_env(jinja_env) -> None: + """ + Customize the jinja env. + + Notes + ----- + See https://jinja.palletsprojects.com/en/3.0.x/api/#jinja2.Environment + """ + jinja_env.globals["project_name"] = project + + +autoapi_prepare_jinja_env = prepare_jinja_env + # Add any paths that contain templates here, relative to this directory. 
templates_path = ["_templates"] @@ -120,14 +185,14 @@ # only for sphinx_rtd_theme html_theme_options = { - "github_url": "https://github.com/pyansys/pyhps", + "github_url": "https://github.com/ansys-internal/pyhps", "show_prev_next": False, "show_breadcrumbs": True, "additional_breadcrumbs": [ ("PyAnsys", "https://docs.pyansys.com/"), ], "collapse_navigation": True, - "navigation_depth": 4, + "navigation_depth": 5, "check_switcher": False, "switcher": { "json_url": f"https://{cname}/release/versions.json", # noqa: E231 @@ -141,10 +206,10 @@ # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = logo +html_logo = pyansys_logo_black # Favicon -html_favicon = "favicon.png" +html_favicon = ansys_favicon # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -180,7 +245,7 @@ latex_documents = [ ( "index", - "ansys-pyhps.tex", + "ansys-hps-client.tex", "Ansys HPS Python Client Documentation", author, "manual", @@ -212,7 +277,9 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [("index", "ansys-pyhps", "Ansys HPS Python Client Documentation", ["ANSYS, Inc."], 1)] +man_pages = [ + ("index", "ansys-hps-client", "Ansys HPS Python Client Documentation", ["ANSYS, Inc."], 1) +] # If true, show URL addresses after external links. # man_show_urls = False @@ -226,7 +293,7 @@ # texinfo_documents = [ # ( # "index", -# "ansys-pyhps", +# "ansys-hps-client", # "Ansys HPS Python Client Documentation", # "ANSYS, Inc.", # "JMS", @@ -249,7 +316,7 @@ global_substitutions = { "client_version": __version__, - "version_no_dots": __version_no_dots__, + "version_no_dots": __version__.replace(".", ""), "external_version": __ansys_apps_version__, "ansys_version": __ansys_apps_version__, } diff --git a/doc/source/examples/ex_fluent_nozzle.rst b/doc/source/examples/ex_fluent_nozzle.rst index 37948ad3..53bdc3eb 100644 --- a/doc/source/examples/ex_fluent_nozzle.rst +++ b/doc/source/examples/ex_fluent_nozzle.rst @@ -1,6 +1,6 @@ .. _example_fluent_nozzle: -Fluent Nozzle +Fluent nozzle ============= This example shows how to submit a Fluent nozzle model for solving on REP. diff --git a/examples/python_linked_multi_process_step/eval.py b/examples/python_linked_multi_process_step/eval.py index 2b5eab20..76d1d506 100644 --- a/examples/python_linked_multi_process_step/eval.py +++ b/examples/python_linked_multi_process_step/eval.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. # SPDX-License-Identifier: MIT # diff --git a/examples/python_multi_process_step/eval.py b/examples/python_multi_process_step/eval.py index b7fb25b3..bd695807 100644 --- a/examples/python_multi_process_step/eval.py +++ b/examples/python_multi_process_step/eval.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. # SPDX-License-Identifier: MIT # diff --git a/examples/python_multi_process_step/task_files.py b/examples/python_multi_process_step/task_files.py index 2e544f13..5e08936d 100644 --- a/examples/python_multi_process_step/task_files.py +++ b/examples/python_multi_process_step/task_files.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. 
 # SPDX-License-Identifier: MIT
 #
diff --git a/generate_resources.py b/generate_resources.py
index e0edd89a..75ca5bdd 100644
--- a/generate_resources.py
+++ b/generate_resources.py
@@ -377,7 +377,7 @@ def _extract_field_type(v, resources) -> str:
 
 def declared_fields(schema, resources):
     """
-    Helper function to retrieve the fields that will be defined as class members for an object
+    Helper function to retrieve the fields that are defined as class members for an object
     """
     fields = []
     fields_doc = []
diff --git a/pyproject.toml b/pyproject.toml
index 8ff20f05..3f284f37 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,83 @@
 [build-system]
-requires = ["setuptools", "wheel"]
-build-backend = "setuptools.build_meta"
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+# Check https://flit.readthedocs.io/en/latest/pyproject_toml.html for all available sections
+name = "ansys-hps-client"
+version = "0.7.dev0"
+description = "A Python client for Ansys HPC Platform Services"
+readme = "README.rst"
+requires-python = ">=3.9,<4.0"
+license = {file = "LICENSE"}
+authors = [
+    {name = "ANSYS, Inc.", email = "pyansys.core@ansys.com"},
+]
+maintainers = [
+    {name = "ANSYS, Inc.", email = "pyansys.core@ansys.com"},
+]
+
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Intended Audience :: Science/Research",
+    "Topic :: Scientific/Engineering :: Information Analysis",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+]
+
+dependencies = [
+    "requests>=2.21.0",
+    "marshmallow>=3.0.0",
+    "marshmallow_oneofschema>=2.0.1",
+    "python-keycloak>=1.5.0,<=2.12.0",
+    "setuptools;python_version>='3.12'", # Python 3.12 doesn't include setuptools automatically
+    "backoff>=2.0.0",
+    "pydantic>=1.10.0",
+]
+
+[project.optional-dependencies]
+tests = [
+    "pytest==7.4.4",
+    "pytest-cov==4.1.0",
+]
+
+doc = [
+    "ansys-sphinx-theme==0.13.1",
+    "autodoc_pydantic==2.0.1",
+    "jupyter_sphinx==0.5.3",
+    "nbsphinx==0.9.3",
+    "numpydoc==1.6.0",
+    "myst-parser==2.0.0",
+    "Sphinx==7.2.6",
+    "sphinx-autoapi==3.0.0",
+    "sphinx-autodoc-typehints==1.25.2",
+    "sphinxcontrib-globalsubs==0.1.1",
+    "sphinxcontrib-httpdomain==1.8.1",
+    "sphinx-copybutton==0.5.2",
+    "sphinx_design==0.5.0",
+    "sphinx-jinja==2.0.2",
+    "sphinxnotes-strike==1.2",
+]
+
+build = [
+    "build==1.0.3",
+    "twine==4.0.2",
+    "wheel",
+    "datamodel-code-generator==0.24.2"
+]
+
+[project.urls]
+Documentation = "https://hps.docs.pyansys.com"
+Source = "https://github.com/ansys-internal/pyhps"
+Homepage = "https://github.com/ansys-internal/pyhps"
+Tracker = "https://github.com/ansys-internal/pyhps/issues"
+
+[tool.flit.module]
+name = "ansys.hps.client"
 
 [tool.black]
 line-length = 100
@@ -10,19 +87,28 @@ profile = "black"
 force_sort_within_sections = true
 line_length = 100
 default_section = "THIRDPARTY"
-src_paths = ["doc", "ansys", "tests"]
+src_paths = ["doc", "src", "tests"]
 
 [tool.coverage.run]
-source = ["ansys.rep"]
+relative_files = true
+source = ["ansys.hps"]
 
 [tool.coverage.report]
 show_missing = true
 
+[tool.coverage.html]
+directory = ".cov/html"
+
+[tool.coverage.xml]
+output = ".cov/coverage.xml"
+
 [tool.pytest.ini_options]
 log_cli = true
 log_cli_level = "INFO"
 log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)"
 log_cli_date_format = "%Y-%m-%d %H:%M:%S"
+addopts = """-ra -s 
--durations=0 -p pytest_cov --cov=ansys.hps --cov-report html:.cov/html \ + --cov-report xml:.cov/xml --cov-report term -vv --cov-append""" markers = [ "requires_evaluator: marks tests as end-to-end requiring a running evaluator (deselect with '-m \"not requires_evaluator\"')", ] diff --git a/requirements/requirements_build.txt b/requirements/requirements_build.txt deleted file mode 100644 index 22cd1a5c..00000000 --- a/requirements/requirements_build.txt +++ /dev/null @@ -1,4 +0,0 @@ -build==1.0.3 -twine==4.0.2 -wheel -datamodel-code-generator==0.24.2 \ No newline at end of file diff --git a/requirements/requirements_doc.txt b/requirements/requirements_doc.txt deleted file mode 100644 index 72cbd8e1..00000000 --- a/requirements/requirements_doc.txt +++ /dev/null @@ -1,9 +0,0 @@ -Sphinx==7.2.6 -numpydoc==1.6.0 -ansys-sphinx-theme==0.12.5 -sphinx-copybutton==0.5.2 -sphinxcontrib-httpdomain==1.8.1 -sphinxcontrib-globalsubs==0.1.1 -sphinxnotes-strike==1.2 -sphinx-autodoc-typehints==1.25.2 -autodoc_pydantic==2.0.1 \ No newline at end of file diff --git a/requirements/requirements_tests.txt b/requirements/requirements_tests.txt deleted file mode 100644 index 9a6c85a4..00000000 --- a/requirements/requirements_tests.txt +++ /dev/null @@ -1,2 +0,0 @@ -pytest==7.4.4 -pytest-cov==4.1.0 diff --git a/setup.py b/setup.py deleted file mode 100644 index ca2c7ba6..00000000 --- a/setup.py +++ /dev/null @@ -1,42 +0,0 @@ -import os - -from setuptools import find_namespace_packages, setup - -# We follow option 3 suggested by PyPA -# https://packaging.python.org/guides/single-sourcing-package-version/ -# to get the package version. -root = os.path.abspath(os.path.dirname(__file__)) -about = {} -with open(os.path.join(root, "ansys", "hps", "client", "__version__.py"), "r") as f: - exec(f.read(), about) - -setup( - name="ansys-pyhps", - version=about["__version__"], - url=about["__url__"], - author="ANSYS, Inc.", - author_email="pyansys.support@ansys.com", - maintainer="PyAnsys developers", - maintainer_email="pyansys.maintainers@ansys.com", - classifiers=[ - "Development Status :: 4 - Beta", - "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - ], - license="MIT", - license_file="LICENSE", - description="A Python client for Ansys HPC Platform Services (HPS)", - long_description=open("README.rst").read(), - long_description_content_type="text/x-rst", - install_requires=[ - "requests>=2.21.0", - "marshmallow>=3.0.0", - "marshmallow_oneofschema>=2.0.1", - "python-keycloak>=1.5.0,<=2.12.0", - "backoff>=2.0.0", - "pydantic>=1.10.0", - ], - python_requires=">=3.7", - packages=find_namespace_packages(include=["ansys.*"]), -) diff --git a/ansys/hps/client/__init__.py b/src/ansys/hps/client/__init__.py similarity index 89% rename from ansys/hps/client/__init__.py rename to src/ansys/hps/client/__init__.py index 34c8deff..38353cfb 100644 --- a/ansys/hps/client/__init__.py +++ b/src/ansys/hps/client/__init__.py @@ -19,16 +19,9 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
+"""PyHPS is a Python client for Ansys HPC Platform Services (HPS).""" - -from .__version__ import ( - __ansys_apps_version__, - __company__, - __company_short__, - __url__, - __version__, - __version_no_dots__, -) +from .__version__ import __ansys_apps_version__, __version__ from .auth import AuthApi from .client import Client from .exceptions import APIError, ClientError, HPSError diff --git a/ansys/hps/client/__version__.py b/src/ansys/hps/client/__version__.py similarity index 83% rename from ansys/hps/client/__version__.py rename to src/ansys/hps/client/__version__.py index 7ced2cab..92b542a3 100644 --- a/ansys/hps/client/__version__.py +++ b/src/ansys/hps/client/__version__.py @@ -20,11 +20,14 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -__version__ = "0.6.dev0" -__version_no_dots__ = __version__.replace(".", "") -__company__ = "ANSYS, Inc." -__company_short__ = "Ansys" -__url__ = "https://github.com/ansys-internal/pyhps" +try: + import importlib.metadata as importlib_metadata +except ModuleNotFoundError: # pragma: no cover + import importlib_metadata + +# Read from the pyproject.toml +# major, minor, patch +__version__ = importlib_metadata.version("ansys-hps-client") # this is only a convenience to default the version # of Ansys simulation applications in PyHPS examples diff --git a/ansys/hps/client/auth/__init__.py b/src/ansys/hps/client/auth/__init__.py similarity index 96% rename from ansys/hps/client/auth/__init__.py rename to src/ansys/hps/client/auth/__init__.py index f3cea38f..4cbe9851 100644 --- a/ansys/hps/client/auth/__init__.py +++ b/src/ansys/hps/client/auth/__init__.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""PyHPS authentication subpackage.""" from .api import AuthApi from .authenticate import authenticate from .resource import User diff --git a/ansys/hps/client/auth/api/__init__.py b/src/ansys/hps/client/auth/api/__init__.py similarity index 96% rename from ansys/hps/client/auth/api/__init__.py rename to src/ansys/hps/client/auth/api/__init__.py index 44989177..aed063c5 100644 --- a/ansys/hps/client/auth/api/__init__.py +++ b/src/ansys/hps/client/auth/api/__init__.py @@ -19,5 +19,5 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""PyHPS Authentication API subpackage.""" from .auth_api import AuthApi diff --git a/ansys/hps/client/auth/api/auth_api.py b/src/ansys/hps/client/auth/api/auth_api.py similarity index 90% rename from ansys/hps/client/auth/api/auth_api.py rename to src/ansys/hps/client/auth/api/auth_api.py index 5ad0a33f..81c027c7 100644 --- a/ansys/hps/client/auth/api/auth_api.py +++ b/src/ansys/hps/client/auth/api/auth_api.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing the Python interface to the Authorization Service API.""" from typing import List @@ -165,7 +165,13 @@ def delete_user(self, user: User) -> None: def _admin_client(client): + """Set information for admin. + Parameters + ---------- + client : Client + HPS client object. 
+ """ custom_headers = { "Authorization": "Bearer " + client.access_token, "Content-Type": "application/json", @@ -183,7 +189,13 @@ def _admin_client(client): def get_users(admin_client: KeycloakAdmin, as_objects=True, **query_params): + """Get users as admin. + Parameters + ---------- + admin_client : KeycloakAdmin + Keycloak admin user. + """ users = admin_client.get_users(query=query_params) if not as_objects: @@ -194,7 +206,15 @@ def get_users(admin_client: KeycloakAdmin, as_objects=True, **query_params): def get_user(admin_client: KeycloakAdmin, id: str, as_objects=True): + """Get user using ID. + Parameters + ---------- + admin_client: KeycloakAdmin + Keycloak admin user. + id : str + User ID. + """ user = admin_client.get_user(user_id=id) if not as_objects: @@ -205,6 +225,15 @@ def get_user(admin_client: KeycloakAdmin, id: str, as_objects=True): def create_user(admin_client: KeycloakAdmin, user: User, as_objects=True): + """Create user. + + Parameters + ---------- + admin_client : KeycloakAdmin + Keycloak admin user. + user : User + HPS user object. + """ schema = UserSchema(many=False) data = schema.dump(user) @@ -223,6 +252,15 @@ def create_user(admin_client: KeycloakAdmin, user: User, as_objects=True): def update_user(admin_client: KeycloakAdmin, user: User, as_objects=True): + """Update user. + + Parameters + ---------- + admin_client : KeycloakAdmin + Keycloak admin user. + user : User + HPS user object. + """ schema = UserSchema(many=False) data = schema.dump(user) diff --git a/ansys/hps/client/auth/authenticate.py b/src/ansys/hps/client/auth/authenticate.py similarity index 97% rename from ansys/hps/client/auth/authenticate.py rename to src/ansys/hps/client/auth/authenticate.py index 7d1d8c8a..ab8aaf96 100644 --- a/ansys/hps/client/auth/authenticate.py +++ b/src/ansys/hps/client/auth/authenticate.py @@ -19,7 +19,8 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module that provides authentication for the user with a password or refresh token against the +HPS authentication service.""" import logging from typing import Union import urllib.parse diff --git a/ansys/hps/client/auth/resource/__init__.py b/src/ansys/hps/client/auth/resource/__init__.py similarity index 96% rename from ansys/hps/client/auth/resource/__init__.py rename to src/ansys/hps/client/auth/resource/__init__.py index f8888970..a0339907 100644 --- a/ansys/hps/client/auth/resource/__init__.py +++ b/src/ansys/hps/client/auth/resource/__init__.py @@ -19,6 +19,6 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""PyHPS resource user subpackage.""" from .user import User diff --git a/ansys/hps/client/auth/resource/user.py b/src/ansys/hps/client/auth/resource/user.py similarity index 98% rename from ansys/hps/client/auth/resource/user.py rename to src/ansys/hps/client/auth/resource/user.py index 909fa785..fc4ff2d0 100644 --- a/ansys/hps/client/auth/resource/user.py +++ b/src/ansys/hps/client/auth/resource/user.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.user import UserSchema + class User(Object): """Provides the user resource. @@ -42,14 +44,14 @@ class User(Object): Last name. email : str, optional E-mail address. - """ class Meta: schema = UserSchema rest_name = "None" - def __init__(self, + def __init__( + self, id=missing, username=missing, password=missing, @@ -67,4 +69,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + UserSchema.Meta.object_class = User diff --git a/ansys/hps/client/auth/schema/__init__.py b/src/ansys/hps/client/auth/schema/__init__.py similarity index 97% rename from ansys/hps/client/auth/schema/__init__.py rename to src/ansys/hps/client/auth/schema/__init__.py index a95b9647..54defd6e 100644 --- a/ansys/hps/client/auth/schema/__init__.py +++ b/src/ansys/hps/client/auth/schema/__init__.py @@ -19,3 +19,4 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""PyHPS schema subpackage.""" diff --git a/ansys/hps/client/auth/schema/user.py b/src/ansys/hps/client/auth/schema/user.py similarity index 93% rename from ansys/hps/client/auth/schema/user.py rename to src/ansys/hps/client/auth/schema/user.py index a2bcbc27..50f5e59c 100644 --- a/ansys/hps/client/auth/schema/user.py +++ b/src/ansys/hps/client/auth/schema/user.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module that creates the user schema.""" from marshmallow import fields @@ -27,6 +27,8 @@ class UserSchema(BaseSchema): + """Create user schema with ID, username, password, first name, last name, and email.""" + class Meta(BaseSchema.Meta): pass diff --git a/ansys/hps/client/client.py b/src/ansys/hps/client/client.py similarity index 99% rename from ansys/hps/client/client.py rename to src/ansys/hps/client/client.py index fe448505..294698ef 100644 --- a/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing the Python client to the HPS APIs.""" import logging from typing import Union diff --git a/ansys/hps/client/common/__init__.py b/src/ansys/hps/client/common/__init__.py similarity index 97% rename from ansys/hps/client/common/__init__.py rename to src/ansys/hps/client/common/__init__.py index 8f61ae36..2fb5a5ae 100644 --- a/ansys/hps/client/common/__init__.py +++ b/src/ansys/hps/client/common/__init__.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- +"""PyHPS common subpackage.""" from .base_resource import Object from .base_schema import BaseSchema, ObjectSchema, ObjectSchemaWithModificationInfo from .restricted_value import RestrictedValue diff --git a/ansys/hps/client/common/base_resource.py b/src/ansys/hps/client/common/base_resource.py similarity index 94% rename from ansys/hps/client/common/base_resource.py rename to src/ansys/hps/client/common/base_resource.py index f4ecb8aa..d952569f 100644 --- a/ansys/hps/client/common/base_resource.py +++ b/src/ansys/hps/client/common/base_resource.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module processing class members for an object.""" import json import logging @@ -64,12 +64,14 @@ def __init__(self, **kwargs): setattr(self, k, missing) def __repr__(self): + """Printable representation of the object.""" return "%s(%s)" % ( self.__class__.__name__, ",".join(["%s=%r" % (k, getattr(self, k)) for k in self.declared_fields()]), ) def __eq__(self, other): + """Compare instances of the object.""" if not isinstance(other, self.__class__): return NotImplemented for k in self.declared_fields(): @@ -78,7 +80,7 @@ def __eq__(self, other): return True def __str__(self): - + """String representation of the object.""" # Ideally we'd simply do # return json.dumps(self.Meta.schema(many=False).dump(self), indent=2) # However the schema.dump() function (rightfully) ignores fields marked as load_only. diff --git a/ansys/hps/client/common/base_schema.py b/src/ansys/hps/client/common/base_schema.py similarity index 89% rename from ansys/hps/client/common/base_schema.py rename to src/ansys/hps/client/common/base_schema.py index d4e7efb0..431c1742 100644 --- a/ansys/hps/client/common/base_schema.py +++ b/src/ansys/hps/client/common/base_schema.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing base schemas and object schemas with and without modification information.""" from marshmallow import INCLUDE, Schema, fields, post_load class BaseSchema(Schema): + """Base schema class.""" + class Meta: ordered = True unknown = INCLUDE @@ -32,10 +34,12 @@ class Meta: @post_load def make_object(self, data, **kwargs): + """Make object for base schema.""" return self.Meta.object_class(**data) class ObjectSchema(BaseSchema): + """Create object schema with ID.""" id = fields.String( allow_none=True, @@ -48,6 +52,7 @@ class ObjectSchema(BaseSchema): class ObjectSchemaWithModificationInfo(ObjectSchema): + """Object schema with creation & modification times, and created & modified by fields.""" creation_time = fields.DateTime( allow_none=True, diff --git a/ansys/hps/client/common/restricted_value.py b/src/ansys/hps/client/common/restricted_value.py similarity index 89% rename from ansys/hps/client/common/restricted_value.py rename to src/ansys/hps/client/common/restricted_value.py index 7e60a24d..64ce4388 100644 --- a/ansys/hps/client/common/restricted_value.py +++ b/src/ansys/hps/client/common/restricted_value.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- +"""Module providing restricted value fields.""" from marshmallow import fields from marshmallow.exceptions import ValidationError class RestrictedValue(fields.Field): + """Restricted value fields.""" + restricted_fields = [ fields.Int(strict=True), fields.Bool(truthy=[True], falsy=[False]), @@ -36,6 +38,7 @@ def __init__(self): super().__init__(allow_none=True) def _deserialize(self, value, attr, obj, **kwargs): + """Convert string to restricted value object.""" for field in self.restricted_fields: try: return field._deserialize(value, attr, obj, **kwargs) @@ -45,4 +48,5 @@ def _deserialize(self, value, attr, obj, **kwargs): self.raise_validation_error() def raise_validation_error(): + """Raise validation error if value is not a float, integer, Boolean, or string.""" raise ValidationError("Value must be a float, integer, Boolean, or string.") diff --git a/ansys/hps/client/connection.py b/src/ansys/hps/client/connection.py similarity index 96% rename from ansys/hps/client/connection.py rename to src/ansys/hps/client/connection.py index 3b9d67b8..c5ad824a 100644 --- a/ansys/hps/client/connection.py +++ b/src/ansys/hps/client/connection.py @@ -19,7 +19,8 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module getting the :class:`requests.Session` object configured for HPS with a given access token +and pinging a given URL.""" import logging from typing import Union diff --git a/ansys/hps/client/exceptions.py b/src/ansys/hps/client/exceptions.py similarity index 97% rename from ansys/hps/client/exceptions.py rename to src/ansys/hps/client/exceptions.py index 30bf152c..5a274e09 100644 --- a/ansys/hps/client/exceptions.py +++ b/src/ansys/hps/client/exceptions.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing the base class for all client and server REP-related errors.""" from requests.exceptions import RequestException diff --git a/ansys/hps/client/jms/__init__.py b/src/ansys/hps/client/jms/__init__.py similarity index 98% rename from ansys/hps/client/jms/__init__.py rename to src/ansys/hps/client/jms/__init__.py index ba22858a..38421490 100644 --- a/ansys/hps/client/jms/__init__.py +++ b/src/ansys/hps/client/jms/__init__.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""PyHPS JMS subpackage.""" from .api import JmsApi, ProjectApi from .resource import ( diff --git a/ansys/hps/client/jms/api/__init__.py b/src/ansys/hps/client/jms/api/__init__.py similarity index 97% rename from ansys/hps/client/jms/api/__init__.py rename to src/ansys/hps/client/jms/api/__init__.py index 66109716..8fee0bae 100644 --- a/ansys/hps/client/jms/api/__init__.py +++ b/src/ansys/hps/client/jms/api/__init__.py @@ -19,6 +19,6 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-
+"""PyHPS JMS API submodule."""
 from .jms_api import JmsApi
 from .project_api import ProjectApi
diff --git a/ansys/hps/client/jms/api/base.py b/src/ansys/hps/client/jms/api/base.py
similarity index 93%
rename from ansys/hps/client/jms/api/base.py
rename to src/ansys/hps/client/jms/api/base.py
index e36e79bc..7fd15f0e 100644
--- a/ansys/hps/client/jms/api/base.py
+++ b/src/ansys/hps/client/jms/api/base.py
@@ -19,7 +19,7 @@
 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
-
+"""Module for creating, getting, updating, deleting, and copying objects."""
 import json
 import logging
 from typing import List, Type
@@ -35,7 +35,7 @@ def get_objects(
     session: Session, url: str, obj_type: Type[Object], as_objects=True, **query_params
 ):
-
+    """Get objects with a session, URL, and object type."""
     rest_name = obj_type.Meta.rest_name
     url = f"{url}/{rest_name}"
     r = session.get(url, params=query_params)
@@ -54,7 +54,7 @@ def get_objects(
 def get_object(
     session: Session, url: str, obj_type: Type[Object], id: str, as_object=True, **query_params
 ):
-
+    """Get an object with a session, URL, object type, and object ID."""
     rest_name = obj_type.Meta.rest_name
     url = f"{url}/{rest_name}/{id}"
     r = session.get(url, params=query_params)
@@ -75,7 +75,7 @@ def get_object(
 def _check_object_types(objects: List[Object], obj_type: Type[Object]):
-
+    """Check that all objects are of the expected type."""
     are_same = [isinstance(o, obj_type) for o in objects]
     if not all(are_same):
         actual_types = set([type(o) for o in objects])
@@ -92,6 +92,7 @@ def create_objects(
     as_objects=True,
     **query_params,
 ):
+    """Create objects."""
     if not objects:
         return []
@@ -120,7 +121,7 @@ def update_objects(
     as_objects=True,
     **query_params,
 ):
-
+    """Update objects."""
     if not objects:
         return []
@@ -142,7 +143,7 @@ def update_objects(
 def delete_objects(session: Session, url: str, objects: List[Object], obj_type: Type[Object]):
-
+    """Delete objects."""
     if not objects:
         return
@@ -157,7 +158,7 @@ def delete_objects(session: Session, url: str, objects: List[Object], obj_type:
 def copy_objects(session: Session, url: str, objects: List[Object], wait: bool = True) -> str:
-
+    """Copy objects."""
     are_same = [o.__class__ == objects[0].__class__ for o in objects[1:]]
     if not all(are_same):
         raise ClientError("Mixed object types")
diff --git a/ansys/hps/client/jms/api/jms_api.py b/src/ansys/hps/client/jms/api/jms_api.py
similarity index 97%
rename from ansys/hps/client/jms/api/jms_api.py
rename to src/ansys/hps/client/jms/api/jms_api.py
index fcd8bc6a..c97218f3 100644
--- a/ansys/hps/client/jms/api/jms_api.py
+++ b/src/ansys/hps/client/jms/api/jms_api.py
@@ -19,7 +19,7 @@
 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
- +"""Module wrapping around the JMS root endpoints.""" import json import logging import os @@ -65,6 +65,7 @@ class JmsApi(object): """ def __init__(self, client: Client): + """Initialize JMS API.""" self.client = client self._fs_url = None @@ -247,11 +248,13 @@ def update_task_definition_template_permissions( ################################################################ # Operations def get_operations(self, as_objects=True, **query_params) -> List[Operation]: + """Get operations.""" return get_objects( self.client.session, self.url, Operation, as_objects=as_objects, **query_params ) def get_operation(self, id, as_object=True) -> Operation: + """Get an operation.""" return get_object(self.client.session, self.url, Operation, id, as_object=as_object) def monitor_operation(self, operation_id: str, max_value: float = 5.0, max_time: float = None): @@ -322,6 +325,7 @@ def get_project_by_name(client, api_url, name, last_created=True) -> Union[Proje def create_project(client, api_url, project, replace=False, as_objects=True) -> Project: + """Create a project.""" url = f"{api_url}/projects/" schema = ProjectSchema() @@ -340,6 +344,7 @@ def create_project(client, api_url, project, replace=False, as_objects=True) -> def update_project(client, api_url, project, as_objects=True) -> Project: + """Update a project.""" url = f"{api_url}/projects/{project.id}" schema = ProjectSchema() @@ -355,7 +360,7 @@ def update_project(client, api_url, project, as_objects=True) -> Project: def delete_project(client, api_url, project): - + """Delete a project.""" url = f"{api_url}/projects/{project.id}" r = client.session.delete(url) @@ -363,6 +368,8 @@ def delete_project(client, api_url, project): def _monitor_operation( jms_api: JmsApi, operation_id: str, max_value: float = 5.0, max_time: float = None ) -> Operation: + """Monitor an operation.""" + @backoff.on_predicate( backoff.expo, lambda x: x[1] == False, @@ -371,6 +378,7 @@ def _monitor_operation( max_time=max_time, ) def _monitor(): + """Monitor the operation with its ID.""" done = False op = jms_api.get_operation(id=operation_id) if op: @@ -387,7 +395,7 @@ def _monitor(): def _copy_objects( client: Client, api_url: str, objects: List[Object], wait: bool = True ) -> Union[str, List[str]]: - + """Copy objects.""" operation_id = base_copy_objects(client.session, api_url, objects) if not wait: @@ -402,7 +410,7 @@ def _copy_objects( def restore_project(jms_api, archive_path): - + """Restore an archived project.""" if not os.path.exists(archive_path): raise HPSError(f"Project archive: path does not exist {archive_path}") @@ -456,7 +464,7 @@ def get_storages(client, api_url): def get_fs_url(client, api_url): - + """Get the file storage URL.""" file_storages = get_storages(client, api_url) if not file_storages: diff --git a/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py similarity index 92% rename from ansys/hps/client/jms/api/project_api.py rename to src/ansys/hps/client/jms/api/project_api.py index bd5cfef9..229396dc 100644 --- a/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-
+"""Module exposing the project endpoints of the JMS."""
 import json
 import logging
 import os
@@ -95,6 +95,7 @@ class ProjectApi:
     """
 
     def __init__(self, client: Client, project_id: str):
+        """Initialize project API."""
         self.client = client
         self.project_id = project_id
         self._fs_url = None
@@ -102,6 +103,7 @@ def __init__(self, client: Client, project_id: str):
 
     @property
     def jms_api_url(self) -> str:
+        """Get the JMS API URL."""
         return f"{self.client.url}/jms/api/v1"
 
     @property
@@ -160,12 +162,15 @@ def get_files(self, as_objects=True, content=False, **query_params) -> List[File]:
         return get_files(self, as_objects=as_objects, content=content, **query_params)
 
     def create_files(self, files: List[File], as_objects=True) -> List[File]:
+        """Create a list of files."""
         return create_files(self, files, as_objects=as_objects)
 
     def update_files(self, files: List[File], as_objects=True):
+        """Update files."""
         return update_files(self, files, as_objects=as_objects)
 
     def delete_files(self, files: List[File]):
+        """Delete files."""
         return self._delete_objects(files, File)
 
     def download_file(
@@ -188,55 +193,67 @@ def download_file(
     def get_parameter_definitions(
         self, as_objects=True, **query_params
     ) -> List[ParameterDefinition]:
+        """Get a list of parameter definitions."""
         return self._get_objects(ParameterDefinition, as_objects, **query_params)
 
     def create_parameter_definitions(
         self, parameter_definitions: List[ParameterDefinition], as_objects=True
     ) -> List[ParameterDefinition]:
+        """Create a list of parameter definitions."""
         return self._create_objects(parameter_definitions, ParameterDefinition, as_objects)
 
     def update_parameter_definitions(
        self, parameter_definitions: List[ParameterDefinition], as_objects=True
    ) -> List[ParameterDefinition]:
+        """Update a list of parameter definitions."""
         return self._update_objects(parameter_definitions, ParameterDefinition, as_objects)
 
     def delete_parameter_definitions(self, parameter_definitions: List[ParameterDefinition]):
+        """Delete a list of parameter definitions."""
         return self._delete_objects(parameter_definitions, ParameterDefinition)
 
     ################################################################
     # Parameter mappings
     def get_parameter_mappings(self, as_objects=True, **query_params) -> List[ParameterMapping]:
+        """Get a list of parameter mappings."""
         return self._get_objects(ParameterMapping, as_objects=as_objects, **query_params)
 
     def create_parameter_mappings(
         self, parameter_mappings: List[ParameterMapping], as_objects=True
     ) -> List[ParameterMapping]:
+        """Create a list of parameter mappings."""
         return self._create_objects(parameter_mappings, ParameterMapping, as_objects=as_objects)
 
     def update_parameter_mappings(
         self, parameter_mappings: List[ParameterMapping], as_objects=True
     ) -> List[ParameterMapping]:
+        """Update a list of parameter mappings."""
         return self._update_objects(parameter_mappings, ParameterMapping, as_objects=as_objects)
 
     def delete_parameter_mappings(self, parameter_mappings: List[ParameterMapping]):
+        """Delete a list of parameter mappings."""
         return self._delete_objects(parameter_mappings, ParameterMapping)
 
     ################################################################
     # Task definitions
     def get_task_definitions(self, as_objects=True, **query_params) -> List[TaskDefinition]:
+        """Get a list of task definitions."""
         return self._get_objects(TaskDefinition, as_objects=as_objects, **query_params)
 
     def create_task_definitions(
         self, task_definitions: List[TaskDefinition], as_objects=True
     ) -> List[TaskDefinition]:
+        """Create a list 
of task definitions.""" return self._create_objects(task_definitions, TaskDefinition, as_objects=as_objects) def update_task_definitions( self, task_definitions: List[TaskDefinition], as_objects=True ) -> List[TaskDefinition]: + """Update a list of task definitions.""" return self._update_objects(task_definitions, TaskDefinition, as_objects=as_objects) def delete_task_definitions(self, task_definitions: List[TaskDefinition]): + """Delete a list of task definitions.""" return self._delete_objects(task_definitions, TaskDefinition) def copy_task_definitions( @@ -264,19 +281,23 @@ def copy_task_definitions( ################################################################ # Job definitions def get_job_definitions(self, as_objects=True, **query_params) -> List[JobDefinition]: + """Get a list of job definitions.""" return self._get_objects(JobDefinition, as_objects=as_objects, **query_params) def create_job_definitions( self, job_definitions: List[JobDefinition], as_objects=True ) -> List[JobDefinition]: + """Create a list of job definitions.""" return self._create_objects(job_definitions, JobDefinition, as_objects=as_objects) def update_job_definitions( self, job_definitions: List[JobDefinition], as_objects=True ) -> List[JobDefinition]: + """Update a list of job definitions.""" return self._update_objects(job_definitions, JobDefinition, as_objects=as_objects) def delete_job_definitions(self, job_definitions: List[JobDefinition]): + """Delete a list of job definitions.""" return self._delete_objects(job_definitions, JobDefinition) def copy_job_definitions( @@ -304,6 +325,7 @@ def copy_job_definitions( ################################################################ # Jobs def get_jobs(self, as_objects=True, **query_params) -> List[Job]: + """Get a list of jobs.""" return self._get_objects(Job, as_objects=as_objects, **query_params) def create_jobs(self, jobs: List[Job], as_objects=True) -> List[Job]: @@ -384,9 +406,11 @@ def delete_jobs(self, jobs: List[Job]): return self._delete_objects(jobs, Job) def sync_jobs(self, jobs: List[Job]): + """Sync a list of jobs.""" return sync_jobs(self, jobs) def _sync_jobs(self, jobs: List[Job]): + """Deprecated function that syncs a list of jobs.""" msg = ( "'ProjectApi._sync_jobs' is deprecated and is to be removed soon. " "Use 'ProjectApi.sync_jobs' instead." 
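# A minimal usage sketch for the ProjectApi wrapper covered by the hunks above, assuming a
# running HPS server and an existing project. The Client constructor arguments and the File
# fields "evaluation_path" and "type" follow the wider PyHPS API and are assumptions here,
# not something this diff defines.
from ansys.hps.client import Client
from ansys.hps.client.jms import ProjectApi
from ansys.hps.client.jms.resource import File

client = Client(url="https://localhost:8443/hps", username="repuser", password="repuser")
project_api = ProjectApi(client, project_id="my_project_id")  # hypothetical project ID

# Register a file resource in JMS; create_files also uploads the content given in "src".
files = project_api.create_files(
    [File(name="inp", evaluation_path="input.json", type="application/json", src="input.json")]
)

# Query the project's jobs and push them to the evaluators again.
jobs = project_api.get_jobs()
project_api.sync_jobs(jobs)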
@@ -398,57 +422,71 @@ def _sync_jobs(self, jobs: List[Job]): ################################################################ # Tasks def get_tasks(self, as_objects=True, **query_params) -> List[Task]: + """Get a list of tasks.""" return self._get_objects(Task, as_objects=as_objects, **query_params) def update_tasks(self, tasks: List[Task], as_objects=True) -> List[Task]: + """Update a list of tasks.""" return self._update_objects(tasks, Task, as_objects=as_objects) ################################################################ # Selections def get_job_selections(self, as_objects=True, **query_params) -> List[JobSelection]: + """Get a list of job selections.""" return self._get_objects(JobSelection, as_objects=as_objects, **query_params) def create_job_selections( self, selections: List[JobSelection], as_objects=True ) -> List[JobSelection]: + """Create a list of job selections.""" return self._create_objects(selections, JobSelection, as_objects=as_objects) def update_job_selections( self, selections: List[JobSelection], as_objects=True ) -> List[JobSelection]: + """Update a list of job selections.""" return self._update_objects(selections, JobSelection, as_objects=as_objects) def delete_job_selections(self, selections: List[JobSelection]): + """Delete a list of job selections.""" return self._delete_objects(selections, JobSelection) ################################################################ # Algorithms def get_algorithms(self, as_objects=True, **query_params) -> List[Algorithm]: + """Get a list of algorithms.""" return self._get_objects(Algorithm, as_objects=as_objects, **query_params) def create_algorithms(self, algorithms: List[Algorithm], as_objects=True) -> List[Algorithm]: + """Create a list of algorithms.""" return self._create_objects(algorithms, Algorithm, as_objects=as_objects) def update_algorithms(self, algorithms: List[Algorithm], as_objects=True) -> List[Algorithm]: + """Update a list of algorithms.""" return self._update_objects(algorithms, Algorithm, as_objects=as_objects) def delete_algorithms(self, algorithms: List[Algorithm]): + """Delete a list of algorithms.""" return self._delete_objects(algorithms, Algorithm) ################################################################ # Permissions def get_permissions(self, as_objects=True) -> List[Permission]: + """Get a list of permissions.""" return self._get_objects(Permission, as_objects=as_objects, fields=None) def update_permissions(self, permissions: List[Permission], as_objects=True): + """Update a list of permissions.""" return self._update_objects(permissions, Permission, as_objects=as_objects) ################################################################ # License contexts def get_license_contexts(self, as_objects=True, **query_params) -> List[LicenseContext]: + """Get a list of license contexts.""" return self._get_objects(self, LicenseContext, as_objects=as_objects, **query_params) def create_license_contexts(self, as_objects=True) -> List[LicenseContext]: + """Create a list of license contexts.""" rest_name = LicenseContext.Meta.rest_name url = f"{self.jms_api_url}/projects/{self.project_id}/{rest_name}" r = self.client.session.post(f"{url}") @@ -460,9 +498,11 @@ def create_license_contexts(self, as_objects=True) -> List[LicenseContext]: return objects def update_license_contexts(self, license_contexts, as_objects=True) -> List[LicenseContext]: + """Update a list of license contexts.""" return self._update_objects(self, license_contexts, LicenseContext, as_objects=as_objects) def 
delete_license_contexts(self): + """Delete license contexts.""" rest_name = LicenseContext.Meta.rest_name url = f"{self.jms_api_url}/projects/{self.id}/{rest_name}" r = self.client.session.delete(url) @@ -503,11 +543,13 @@ def copy_default_execution_script(self, filename: str) -> File: ################################################################ def _get_objects(self, obj_type: Object, as_objects=True, **query_params): + """Get objects.""" return get_objects(self.client.session, self.url, obj_type, as_objects, **query_params) def _create_objects( self, objects: List[Object], obj_type: Type[Object], as_objects=True, **query_params ): + """Create objects.""" return create_objects( self.client.session, self.url, objects, obj_type, as_objects, **query_params ) @@ -515,11 +557,13 @@ def _create_objects( def _update_objects( self, objects: List[Object], obj_type: Type[Object], as_objects=True, **query_params ): + """Update objects.""" return update_objects( self.client.session, self.url, objects, obj_type, as_objects, **query_params ) def _delete_objects(self, objects: List[Object], obj_type: Type[Object]): + """Delete objects.""" delete_objects(self.client.session, self.url, objects, obj_type) @@ -539,7 +583,7 @@ def _download_files(project_api: ProjectApi, files: List[File]): def get_files(project_api: ProjectApi, as_objects=True, content=False, **query_params): - + """Get files for the project API.""" files = get_objects( project_api.client.session, project_api.url, File, as_objects=as_objects, **query_params ) @@ -579,6 +623,7 @@ def _upload_files(project_api: ProjectApi, files): def create_files(project_api: ProjectApi, files, as_objects=True) -> List[File]: + """Create a list of files.""" # (1) Create file resources in JMS created_files = create_objects( project_api.client.session, project_api.url, files, File, as_objects=as_objects @@ -605,6 +650,7 @@ def create_files(project_api: ProjectApi, files, as_objects=True) -> List[File]: def update_files(project_api: ProjectApi, files: List[File], as_objects=True) -> List[File]: + """Update a list of files.""" # Upload files first if there are any src parameters _upload_files(project_api, files) # Update file resources in JMS @@ -620,7 +666,7 @@ def _download_file( progress_handler: Callable[[int], None] = None, stream: bool = True, ) -> str: - + """Download a file.""" if getattr(file, "hash", None) is None: log.warning(f"No hash found for file {file.name}.") @@ -642,7 +688,7 @@ def _download_file( def copy_projects( project_api: ProjectApi, project_source_ids: List[str], wait: bool = True ) -> Union[str, List[str]]: - + """Copy projects.""" return _copy_objects( project_api.client, project_api.jms_api_url, @@ -652,7 +698,7 @@ def copy_projects( def archive_project(project_api: ProjectApi, target_path, include_job_files=True) -> str: - + """Archive projects.""" # PUT archive request url = f"{project_api.url}/archive" query_params = {} @@ -702,7 +748,7 @@ def copy_jobs(project_api: ProjectApi, jobs: List[Job], as_objects=True, **query def sync_jobs(project_api: ProjectApi, jobs: List[Job]): - + """Sync jobs.""" url = f"{project_api.url}/jobs:sync" # noqa: E231 json_data = json.dumps({"job_ids": [obj.id for obj in jobs]}) r = project_api.client.session.put(f"{url}", data=json_data) @@ -716,7 +762,7 @@ def _fs_copy_file( destination_bucket: str, destination_name: str, ) -> str: - + """Copy files with the fs REST gateway.""" json_data = json.dumps( {"destination": f"ansfs://{destination_bucket}/{destination_name}"} # noqa: E231 ) diff --git 
a/ansys/hps/client/jms/keys.py b/src/ansys/hps/client/jms/keys.py similarity index 96% rename from ansys/hps/client/jms/keys.py rename to src/ansys/hps/client/jms/keys.py index 89d0ad6f..24ad1cc9 100644 --- a/ansys/hps/client/jms/keys.py +++ b/src/ansys/hps/client/jms/keys.py @@ -19,5 +19,5 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing the object ID key.""" OBJECT_ID_KEY = "id" diff --git a/ansys/hps/client/jms/resource/.gitignore b/src/ansys/hps/client/jms/resource/.gitignore similarity index 100% rename from ansys/hps/client/jms/resource/.gitignore rename to src/ansys/hps/client/jms/resource/.gitignore diff --git a/ansys/hps/client/jms/resource/__init__.py b/src/ansys/hps/client/jms/resource/__init__.py similarity index 75% rename from ansys/hps/client/jms/resource/__init__.py rename to src/ansys/hps/client/jms/resource/__init__.py index 82f5c743..80c7c525 100644 --- a/ansys/hps/client/jms/resource/__init__.py +++ b/src/ansys/hps/client/jms/resource/__init__.py @@ -19,8 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - - +"""PyHPS JMS resource subpackage.""" from .algorithm import Algorithm from .evaluator import Evaluator, EvaluatorConfigurationUpdate from .file import File @@ -29,12 +28,30 @@ from .job_definition import JobDefinition from .license_context import LicenseContext from .operation import Operation -from .parameter_definition import ParameterDefinition, FloatParameterDefinition, \ - IntParameterDefinition, StringParameterDefinition, BoolParameterDefinition +from .parameter_definition import ( + BoolParameterDefinition, + FloatParameterDefinition, + IntParameterDefinition, + ParameterDefinition, + StringParameterDefinition, +) from .parameter_mapping import ParameterMapping -from .project import Project from .permission import Permission +from .project import Project from .selection import JobSelection from .task import Task -from .task_definition import Licensing, SuccessCriteria, Software, ResourceRequirements, TaskDefinition, HpcResources -from .task_definition_template import TaskDefinitionTemplate, TemplateInputFile, TemplateOutputFile, TemplateResourceRequirements, TemplateProperty +from .task_definition import ( + HpcResources, + Licensing, + ResourceRequirements, + Software, + SuccessCriteria, + TaskDefinition, +) +from .task_definition_template import ( + TaskDefinitionTemplate, + TemplateInputFile, + TemplateOutputFile, + TemplateProperty, + TemplateResourceRequirements, +) diff --git a/ansys/hps/client/jms/resource/algorithm.py b/src/ansys/hps/client/jms/resource/algorithm.py similarity index 97% rename from ansys/hps/client/jms/resource/algorithm.py rename to src/ansys/hps/client/jms/resource/algorithm.py index 837b2e2a..8faf5683 100644 --- a/ansys/hps/client/jms/resource/algorithm.py +++ b/src/ansys/hps/client/jms/resource/algorithm.py @@ -19,12 +19,15 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
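# The import regrouping in the resource subpackage above is purely cosmetic (isort/black
# style); the public names stay importable from the same location. A quick sanity check,
# assuming the package is installed from the new src/ layout:
from ansys.hps.client.jms.resource import (
    BoolParameterDefinition,
    File,
    FloatParameterDefinition,
    HpcResources,
    TaskDefinitionTemplate,
)

for cls in (BoolParameterDefinition, File, FloatParameterDefinition, HpcResources, TaskDefinitionTemplate):
    print(cls.__name__, cls.Meta.rest_name)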
- +"""Module providing the algorithm resource.""" # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.algorithm import AlgorithmSchema + class Algorithm(Object): """Provides the algorithm resource. @@ -55,7 +58,8 @@ class Meta: schema = AlgorithmSchema rest_name = "algorithms" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -79,4 +83,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + AlgorithmSchema.Meta.object_class = Algorithm diff --git a/ansys/hps/client/jms/resource/evaluator.py b/src/ansys/hps/client/jms/resource/evaluator.py similarity index 96% rename from ansys/hps/client/jms/resource/evaluator.py rename to src/ansys/hps/client/jms/resource/evaluator.py index c221f5c5..c45f44ec 100644 --- a/ansys/hps/client/jms/resource/evaluator.py +++ b/src/ansys/hps/client/jms/resource/evaluator.py @@ -19,12 +19,13 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object -from ..schema.evaluator import EvaluatorConfigurationUpdateSchema -from ..schema.evaluator import EvaluatorSchema + +from ..schema.evaluator import EvaluatorConfigurationUpdateSchema, EvaluatorSchema + class EvaluatorConfigurationUpdate(Object): """Provides for updating an evaluator configuration resource. @@ -50,7 +51,8 @@ class Meta: schema = EvaluatorConfigurationUpdateSchema rest_name = "None" - def __init__(self, + def __init__( + self, id=missing, name=missing, applications=missing, @@ -76,8 +78,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + EvaluatorConfigurationUpdateSchema.Meta.object_class = EvaluatorConfigurationUpdate + class Evaluator(Object): """Provides the evaluator resource. @@ -86,7 +90,8 @@ class Evaluator(Object): id : str, optional Unique ID to access the resource, generated internally by the server on creation. host_id : str - Unique ID built from hardware information and the selected configuration information of the evaluator. + Unique ID built from hardware information and the selected configuration information of the + evaluator. name : str, optional Name of the evaluator. hostname : str, optional @@ -98,7 +103,8 @@ class Evaluator(Object): task_manager_type : str, optional Type of the task manager used by the evaluator. project_server_select : bool, optional - Whether the evaluator allows server-driven assignment of projects or uses its own local settings. + Whether the evaluator allows server-driven assignment of projects or uses its own + local settings. alive_update_interval : int, optional Minimal time in seconds between evaluator registration updates. 
update_time : datetime, optional @@ -122,7 +128,8 @@ class Meta: schema = EvaluatorSchema rest_name = "evaluators" - def __init__(self, + def __init__( + self, id=missing, host_id=missing, name=missing, @@ -160,4 +167,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + EvaluatorSchema.Meta.object_class = Evaluator diff --git a/ansys/hps/client/jms/resource/file.py b/src/ansys/hps/client/jms/resource/file.py similarity index 97% rename from ansys/hps/client/jms/resource/file.py rename to src/ansys/hps/client/jms/resource/file.py index 77efe642..2c5c2a31 100644 --- a/ansys/hps/client/jms/resource/file.py +++ b/src/ansys/hps/client/jms/resource/file.py @@ -19,14 +19,13 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing the file resource.""" from marshmallow.utils import missing -from typing import Union -import io -from ..schema.file import FileSchema from ansys.hps.client.common import Object +from ..schema.file import FileSchema + class File(Object): """Provides the file resource. @@ -36,7 +35,7 @@ class File(Object): src : Union[str, io.IOBase], optional Client-only field to specify either the path of an input file or a file-like object. In the latter case, `requests` recommends that - you open files in binary mode. + you open files in binary mode. id : str, optional Unique ID to access the resource, generated internally by the server on creation. name : str @@ -77,7 +76,8 @@ class Meta: schema = FileSchema rest_name = "files" - def __init__(self, + def __init__( + self, src=None, id=missing, name=missing, @@ -121,4 +121,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + FileSchema.Meta.object_class = File diff --git a/ansys/hps/client/jms/resource/fitness_definition.py b/src/ansys/hps/client/jms/resource/fitness_definition.py similarity index 97% rename from ansys/hps/client/jms/resource/fitness_definition.py rename to src/ansys/hps/client/jms/resource/fitness_definition.py index 91dca859..2f51c1da 100644 --- a/ansys/hps/client/jms/resource/fitness_definition.py +++ b/src/ansys/hps/client/jms/resource/fitness_definition.py @@ -19,14 +19,15 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing the fitness definition and fitness term definition resources.""" import logging from marshmallow.utils import missing -from ..schema.fitness_definition import FitnessDefinitionSchema, FitnessTermDefinitionSchema from ansys.hps.client.common import Object +from ..schema.fitness_definition import FitnessDefinitionSchema, FitnessTermDefinitionSchema + log = logging.getLogger(__name__) @@ -37,7 +38,7 @@ class FitnessTermDefinition(Object): ---------- id : str, optional Unique ID to access the resource, generated internally by the server on creation. - name : str, optional + name : str, optional Name of the fitness term. expression : str, optional Python expression that defines the fitness term. @@ -50,16 +51,16 @@ class FitnessTermDefinition(Object): -------- Create a fitness term of the objective type. - + >>> ft1 = FitnessTermDefinition( ... name="weight", ... type="design_objective", ... weighting_factor=1.0, ... expression="map_design_objective(values['weight'],7.5,5.5)" ... ) - + Create a fitness term of the target constraint type. - + >>> ft2 = FitnessTermDefinition( ... 
name="torsional_stiffness", ... type="target_constraint", @@ -67,9 +68,9 @@ class FitnessTermDefinition(Object): ... expression="map_target_constraint( ... values['torsion_stiffness'], 1313.0, 5.0, 30.0)" ... ) - + Create a fitness term of the limit constraint type. - + >>> ft3 = FitnessTermDefinition( ... name="max_stress", ... type="limit_constraint", diff --git a/ansys/hps/client/jms/resource/job.py b/src/ansys/hps/client/jms/resource/job.py similarity index 99% rename from ansys/hps/client/jms/resource/job.py rename to src/ansys/hps/client/jms/resource/job.py index 5cad3b75..55150c3a 100644 --- a/ansys/hps/client/jms/resource/job.py +++ b/src/ansys/hps/client/jms/resource/job.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.job import JobSchema + class Job(Object): """Provides a job resource. @@ -74,7 +76,8 @@ class Meta: schema = JobSchema rest_name = "jobs" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -116,4 +119,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + JobSchema.Meta.object_class = Job diff --git a/ansys/hps/client/jms/resource/job_definition.py b/src/ansys/hps/client/jms/resource/job_definition.py similarity index 97% rename from ansys/hps/client/jms/resource/job_definition.py rename to src/ansys/hps/client/jms/resource/job_definition.py index 0b4cb542..29b499d5 100644 --- a/ansys/hps/client/jms/resource/job_definition.py +++ b/src/ansys/hps/client/jms/resource/job_definition.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.job_definition import JobDefinitionSchema + class JobDefinition(Object): """Provides the job definition resource. @@ -43,7 +45,8 @@ class JobDefinition(Object): name : str, optional Name of the job definition. active : bool - Whether this is the active job definition in the project where evaluators are evaluating pending jobs. + Whether this is the active job definition in the project where evaluators are evaluating + pending jobs. client_hash : str, optional parameter_definition_ids : list[str] List of parameter definition IDs. @@ -59,7 +62,8 @@ class Meta: schema = JobDefinitionSchema rest_name = "job_definitions" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -89,4 +93,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + JobDefinitionSchema.Meta.object_class = JobDefinition diff --git a/ansys/hps/client/jms/resource/license_context.py b/src/ansys/hps/client/jms/resource/license_context.py similarity index 94% rename from ansys/hps/client/jms/resource/license_context.py rename to src/ansys/hps/client/jms/resource/license_context.py index 6dba1234..abcb302d 100644 --- a/ansys/hps/client/jms/resource/license_context.py +++ b/src/ansys/hps/client/jms/resource/license_context.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.license_context import LicenseContextSchema + class LicenseContext(Object): """Provides the license context resource. @@ -41,14 +43,11 @@ class Meta: schema = LicenseContextSchema rest_name = "license_contexts" - def __init__(self, - context_id=missing, - environment=missing, - **kwargs - ): + def __init__(self, context_id=missing, environment=missing, **kwargs): self.context_id = context_id self.environment = environment self.obj_type = self.__class__.__name__ + LicenseContextSchema.Meta.object_class = LicenseContext diff --git a/ansys/hps/client/jms/resource/operation.py b/src/ansys/hps/client/jms/resource/operation.py similarity index 98% rename from ansys/hps/client/jms/resource/operation.py rename to src/ansys/hps/client/jms/resource/operation.py index b149463b..605c2fb9 100644 --- a/ansys/hps/client/jms/resource/operation.py +++ b/src/ansys/hps/client/jms/resource/operation.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.operation import OperationSchema + class Operation(Object): """Provides the operation resource. @@ -48,7 +50,8 @@ class Meta: schema = OperationSchema rest_name = "operations" - def __init__(self, + def __init__( + self, id=missing, name=missing, target=missing, @@ -76,4 +79,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + OperationSchema.Meta.object_class = Operation diff --git a/ansys/hps/client/jms/resource/parameter_definition.py b/src/ansys/hps/client/jms/resource/parameter_definition.py similarity index 95% rename from ansys/hps/client/jms/resource/parameter_definition.py rename to src/ansys/hps/client/jms/resource/parameter_definition.py index 38f6a713..c4c8ec36 100644 --- a/ansys/hps/client/jms/resource/parameter_definition.py +++ b/src/ansys/hps/client/jms/resource/parameter_definition.py @@ -19,15 +19,19 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object -from ..schema.parameter_definition import ParameterDefinitionSchema -from ..schema.parameter_definition import FloatParameterDefinitionSchema -from ..schema.parameter_definition import IntParameterDefinitionSchema -from ..schema.parameter_definition import BoolParameterDefinitionSchema -from ..schema.parameter_definition import StringParameterDefinitionSchema + +from ..schema.parameter_definition import ( + BoolParameterDefinitionSchema, + FloatParameterDefinitionSchema, + IntParameterDefinitionSchema, + ParameterDefinitionSchema, + StringParameterDefinitionSchema, +) + class ParameterDefinition(Object): """Provides the parameter definition resource. @@ -41,14 +45,14 @@ class Meta: schema = ParameterDefinitionSchema rest_name = "parameter_definitions" - def __init__(self, - **kwargs - ): + def __init__(self, **kwargs): self.obj_type = self.__class__.__name__ + ParameterDefinitionSchema.Meta.object_class = ParameterDefinition + class FloatParameterDefinition(ParameterDefinition): """Provides the float parameter definition resource. 
@@ -87,15 +91,16 @@ class FloatParameterDefinition(ParameterDefinition): cyclic : bool, optional Whether the parameter is cyclic. value_list : list, optional - List of allowed values. This parameter provides an alternative to specifying upper and lower limits. - + List of allowed values. This parameter provides an alternative to specifying + upper and lower limits. """ class Meta: schema = FloatParameterDefinitionSchema rest_name = "parameter_definitions" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -135,8 +140,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + FloatParameterDefinitionSchema.Meta.object_class = FloatParameterDefinition + class IntParameterDefinition(ParameterDefinition): """Provides the integer parameter definition resource. @@ -180,7 +187,8 @@ class Meta: schema = IntParameterDefinitionSchema rest_name = "parameter_definitions" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -218,8 +226,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + IntParameterDefinitionSchema.Meta.object_class = IntParameterDefinition + class BoolParameterDefinition(ParameterDefinition): """Provides the Boolean parameter definition resource. @@ -254,7 +264,8 @@ class Meta: schema = BoolParameterDefinitionSchema rest_name = "parameter_definitions" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -284,8 +295,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + BoolParameterDefinitionSchema.Meta.object_class = BoolParameterDefinition + class StringParameterDefinition(ParameterDefinition): """Provides the string parameter definition resource. @@ -322,7 +335,8 @@ class Meta: schema = StringParameterDefinitionSchema rest_name = "parameter_definitions" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -354,4 +368,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + StringParameterDefinitionSchema.Meta.object_class = StringParameterDefinition diff --git a/ansys/hps/client/jms/resource/parameter_mapping.py b/src/ansys/hps/client/jms/resource/parameter_mapping.py similarity index 99% rename from ansys/hps/client/jms/resource/parameter_mapping.py rename to src/ansys/hps/client/jms/resource/parameter_mapping.py index 2d96c014..0669a92a 100644 --- a/ansys/hps/client/jms/resource/parameter_mapping.py +++ b/src/ansys/hps/client/jms/resource/parameter_mapping.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.parameter_mapping import ParameterMappingSchema + class ParameterMapping(Object): """Provides the parameter mapping resource. 
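# Each resource module in this diff ends with a line such as
# "ParameterMappingSchema.Meta.object_class = ParameterMapping". The snippet below sketches
# the idea behind that late binding (the schema builds typed resource objects on load); the
# actual BaseSchema/ObjectSchema internals are not part of this diff, so the post_load hook
# here is an illustrative stand-in rather than the library's real implementation.
from marshmallow import Schema, fields, post_load


class ThingSchema(Schema):
    class Meta:
        object_class = None  # assigned after the resource class exists, as in the modules above

    name = fields.String()

    @post_load
    def make_object(self, data, **kwargs):
        # Build an instance of whichever class was registered on Meta.
        return self.Meta.object_class(**data)


class Thing:
    def __init__(self, name=None, **kwargs):
        self.name = name


ThingSchema.Meta.object_class = Thing  # same wiring pattern as the resource modules

assert isinstance(ThingSchema().load({"name": "demo"}), Thing)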
@@ -64,7 +66,8 @@ class Meta: schema = ParameterMappingSchema rest_name = "parameter_mappings" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -110,4 +113,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + ParameterMappingSchema.Meta.object_class = ParameterMapping diff --git a/ansys/hps/client/jms/resource/permission.py b/src/ansys/hps/client/jms/resource/permission.py similarity index 93% rename from ansys/hps/client/jms/resource/permission.py rename to src/ansys/hps/client/jms/resource/permission.py index dcacf3dc..2ef8c182 100644 --- a/ansys/hps/client/jms/resource/permission.py +++ b/src/ansys/hps/client/jms/resource/permission.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.permission import PermissionSchema + class Permission(Object): """Provides the permission resource. @@ -44,12 +46,8 @@ class Meta: schema = PermissionSchema rest_name = "permissions" - def __init__(self, - permission_type=missing, - value_id=missing, - value_name=missing, - role=missing, - **kwargs + def __init__( + self, permission_type=missing, value_id=missing, value_name=missing, role=missing, **kwargs ): self.permission_type = permission_type self.value_id = value_id @@ -58,4 +56,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + PermissionSchema.Meta.object_class = Permission diff --git a/ansys/hps/client/jms/resource/project.py b/src/ansys/hps/client/jms/resource/project.py similarity index 98% rename from ansys/hps/client/jms/resource/project.py rename to src/ansys/hps/client/jms/resource/project.py index 36c0c178..fe91886c 100644 --- a/ansys/hps/client/jms/resource/project.py +++ b/src/ansys/hps/client/jms/resource/project.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.project import ProjectSchema + class Project(Object): """Provides the project resource. @@ -51,7 +53,8 @@ class Meta: schema = ProjectSchema rest_name = "projects" - def __init__(self, + def __init__( + self, id=missing, name=missing, active=missing, @@ -71,4 +74,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + ProjectSchema.Meta.object_class = Project diff --git a/ansys/hps/client/jms/resource/selection.py b/src/ansys/hps/client/jms/resource/selection.py similarity index 98% rename from ansys/hps/client/jms/resource/selection.py rename to src/ansys/hps/client/jms/resource/selection.py index 5bc9a4c2..6f857b9b 100644 --- a/ansys/hps/client/jms/resource/selection.py +++ b/src/ansys/hps/client/jms/resource/selection.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.selection import JobSelectionSchema + class JobSelection(Object): """Provides the job selection resource. 
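# The Permission resource above is a plain keyword container, so granting access to a project
# amounts to building Permission objects and pushing them with ProjectApi.update_permissions()
# from the earlier hunks. The role and user values below are illustrative; the set of valid
# values is not part of this diff.
from ansys.hps.client.jms.resource import Permission

permissions = [
    Permission(permission_type="user", value_id="user-id", value_name="jane.doe", role="writer"),
    Permission(permission_type="anyone", role="reader"),
]

# With a live ProjectApi instance:
# project_api.update_permissions(permissions)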
@@ -52,7 +54,8 @@ class Meta: schema = JobSelectionSchema rest_name = "job_selections" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -74,4 +77,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + JobSelectionSchema.Meta.object_class = JobSelection diff --git a/ansys/hps/client/jms/resource/task.py b/src/ansys/hps/client/jms/resource/task.py similarity index 99% rename from ansys/hps/client/jms/resource/task.py rename to src/ansys/hps/client/jms/resource/task.py index 35a70827..c5069ef0 100644 --- a/ansys/hps/client/jms/resource/task.py +++ b/src/ansys/hps/client/jms/resource/task.py @@ -19,12 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object + from ..schema.task import TaskSchema + class Task(Object): """Provides the task resource. @@ -84,7 +86,8 @@ class Meta: schema = TaskSchema rest_name = "tasks" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -138,4 +141,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + TaskSchema.Meta.object_class = Task diff --git a/ansys/hps/client/jms/resource/task_definition.py b/src/ansys/hps/client/jms/resource/task_definition.py similarity index 94% rename from ansys/hps/client/jms/resource/task_definition.py rename to src/ansys/hps/client/jms/resource/task_definition.py index 104b5dfb..4672cf6d 100644 --- a/ansys/hps/client/jms/resource/task_definition.py +++ b/src/ansys/hps/client/jms/resource/task_definition.py @@ -19,16 +19,20 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object -from ..schema.task_definition import HpcResourcesSchema -from ..schema.task_definition import ResourceRequirementsSchema -from ..schema.task_definition import SoftwareSchema -from ..schema.task_definition import SuccessCriteriaSchema -from ..schema.task_definition import LicensingSchema -from ..schema.task_definition import TaskDefinitionSchema + +from ..schema.task_definition import ( + HpcResourcesSchema, + LicensingSchema, + ResourceRequirementsSchema, + SoftwareSchema, + SuccessCriteriaSchema, + TaskDefinitionSchema, +) + class HpcResources(Object): """Provides the HPC resource. @@ -49,7 +53,8 @@ class Meta: schema = HpcResourcesSchema rest_name = "None" - def __init__(self, + def __init__( + self, num_cores_per_node=missing, num_gpus_per_node=missing, exclusive=missing, @@ -63,8 +68,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + HpcResourcesSchema.Meta.object_class = HpcResources + class ResourceRequirements(Object): """Provides the resource requirements resource. @@ -90,7 +97,8 @@ class Meta: schema = ResourceRequirementsSchema rest_name = "None" - def __init__(self, + def __init__( + self, platform=missing, memory=missing, num_cores=missing, @@ -110,8 +118,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + ResourceRequirementsSchema.Meta.object_class = ResourceRequirements + class Software(Object): """Provides the software resource. 
@@ -128,18 +138,16 @@ class Meta: schema = SoftwareSchema rest_name = "None" - def __init__(self, - name=missing, - version=missing, - **kwargs - ): + def __init__(self, name=missing, version=missing, **kwargs): self.name = name self.version = version self.obj_type = self.__class__.__name__ + SoftwareSchema.Meta.object_class = Software + class SuccessCriteria(Object): """Provides the success criteria resource. @@ -163,7 +171,8 @@ class Meta: schema = SuccessCriteriaSchema rest_name = "None" - def __init__(self, + def __init__( + self, return_code=missing, expressions=missing, required_output_file_ids=missing, @@ -181,8 +190,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + SuccessCriteriaSchema.Meta.object_class = SuccessCriteria + class Licensing(Object): """Provides the licensing resource. @@ -197,16 +208,15 @@ class Meta: schema = LicensingSchema rest_name = "None" - def __init__(self, - enable_shared_licensing=missing, - **kwargs - ): + def __init__(self, enable_shared_licensing=missing, **kwargs): self.enable_shared_licensing = enable_shared_licensing self.obj_type = self.__class__.__name__ + LicensingSchema.Meta.object_class = Licensing + class TaskDefinition(Object): """Provides the task definition resource. @@ -259,7 +269,8 @@ class Meta: schema = TaskDefinitionSchema rest_name = "task_definitions" - def __init__(self, + def __init__( + self, id=missing, creation_time=missing, modification_time=missing, @@ -307,4 +318,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + TaskDefinitionSchema.Meta.object_class = TaskDefinition diff --git a/ansys/hps/client/jms/resource/task_definition_template.py b/src/ansys/hps/client/jms/resource/task_definition_template.py similarity index 93% rename from ansys/hps/client/jms/resource/task_definition_template.py rename to src/ansys/hps/client/jms/resource/task_definition_template.py index ad4ba556..73f44873 100644 --- a/ansys/hps/client/jms/resource/task_definition_template.py +++ b/src/ansys/hps/client/jms/resource/task_definition_template.py @@ -19,15 +19,19 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - # autogenerated code from marshmallow.utils import missing + from ansys.hps.client.common import Object -from ..schema.task_definition_template import TemplatePropertySchema -from ..schema.task_definition_template import TemplateResourceRequirementsSchema -from ..schema.task_definition_template import TemplateInputFileSchema -from ..schema.task_definition_template import TemplateOutputFileSchema -from ..schema.task_definition_template import TaskDefinitionTemplateSchema + +from ..schema.task_definition_template import ( + TaskDefinitionTemplateSchema, + TemplateInputFileSchema, + TemplateOutputFileSchema, + TemplatePropertySchema, + TemplateResourceRequirementsSchema, +) + class TemplateProperty(Object): """Provides the template property resource. 
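# The task definition helper objects above (Software, ResourceRequirements, SuccessCriteria,
# Licensing, HpcResources) are simple keyword containers. This sketch wires a few of them into
# a TaskDefinition; the TaskDefinition field names used here (name, software_requirements,
# resource_requirements, success_criteria, licensing) follow the wider PyHPS API and are
# assumptions as far as this diff is concerned.
from ansys.hps.client.jms.resource import (
    Licensing,
    ResourceRequirements,
    Software,
    SuccessCriteria,
    TaskDefinition,
)

task_def = TaskDefinition(
    name="MAPDL run",
    software_requirements=[Software(name="Ansys Mechanical APDL", version="2024 R1")],
    resource_requirements=ResourceRequirements(platform="linux", num_cores=4, memory=4 * 1024**3),
    success_criteria=SuccessCriteria(return_code=0),
    licensing=Licensing(enable_shared_licensing=False),
)

# With a live ProjectApi instance:
# task_def = project_api.create_task_definitions([task_def])[0]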
@@ -48,12 +52,8 @@ class Meta: schema = TemplatePropertySchema rest_name = "None" - def __init__(self, - default=missing, - description=missing, - type=missing, - value_list=missing, - **kwargs + def __init__( + self, default=missing, description=missing, type=missing, value_list=missing, **kwargs ): self.default = default self.description = description @@ -62,8 +62,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + TemplatePropertySchema.Meta.object_class = TemplateProperty + class TemplateResourceRequirements(Object): """Provides the template resource requirements resource. @@ -82,7 +84,8 @@ class Meta: schema = TemplateResourceRequirementsSchema rest_name = "None" - def __init__(self, + def __init__( + self, platform=missing, memory=missing, num_cores=missing, @@ -102,8 +105,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + TemplateResourceRequirementsSchema.Meta.object_class = TemplateResourceRequirements + class TemplateInputFile(Object): """Provides the template input file resource. @@ -125,7 +130,8 @@ class Meta: schema = TemplateInputFileSchema rest_name = "None" - def __init__(self, + def __init__( + self, name=missing, type=missing, evaluation_path=missing, @@ -141,8 +147,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + TemplateInputFileSchema.Meta.object_class = TemplateInputFile + class TemplateOutputFile(Object): """Provides the template output file resource. @@ -169,7 +177,8 @@ class Meta: schema = TemplateOutputFileSchema rest_name = "None" - def __init__(self, + def __init__( + self, name=missing, type=missing, evaluation_path=missing, @@ -189,8 +198,10 @@ def __init__(self, self.obj_type = self.__class__.__name__ + TemplateOutputFileSchema.Meta.object_class = TemplateOutputFile + class TaskDefinitionTemplate(Object): """Provides the task definition template resource. @@ -234,7 +245,8 @@ class Meta: schema = TaskDefinitionTemplateSchema rest_name = "task_definition_templates" - def __init__(self, + def __init__( + self, id=missing, modification_time=missing, creation_time=missing, @@ -272,4 +284,5 @@ def __init__(self, self.obj_type = self.__class__.__name__ + TaskDefinitionTemplateSchema.Meta.object_class = TaskDefinitionTemplate diff --git a/ansys/hps/client/jms/schema/__init__.py b/src/ansys/hps/client/jms/schema/__init__.py similarity index 97% rename from ansys/hps/client/jms/schema/__init__.py rename to src/ansys/hps/client/jms/schema/__init__.py index a95b9647..5e72538d 100644 --- a/ansys/hps/client/jms/schema/__init__.py +++ b/src/ansys/hps/client/jms/schema/__init__.py @@ -19,3 +19,4 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""PyHPS JMS schema subpackage.""" diff --git a/ansys/hps/client/jms/schema/algorithm.py b/src/ansys/hps/client/jms/schema/algorithm.py similarity index 96% rename from ansys/hps/client/jms/schema/algorithm.py rename to src/ansys/hps/client/jms/schema/algorithm.py index 75b153d7..4e030410 100644 --- a/ansys/hps/client/jms/schema/algorithm.py +++ b/src/ansys/hps/client/jms/schema/algorithm.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
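# The template building blocks above are constructed the same way. Only constructor arguments
# that appear in this diff are used for the helper objects; the TaskDefinitionTemplate "name"
# field is an assumption taken from the wider PyHPS API.
from ansys.hps.client.jms.resource import (
    TaskDefinitionTemplate,
    TemplateInputFile,
    TemplateProperty,
)

num_cores = TemplateProperty(default=1, description="Number of cores", type="int")
input_file = TemplateInputFile(name="mac", type="text/plain", evaluation_path="run.mac")
template = TaskDefinitionTemplate(name="MAPDL template")  # "name" is assumed, not shown above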
- +"""Module providing the algorithm schema with modification information.""" from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/evaluator.py b/src/ansys/hps/client/jms/schema/evaluator.py similarity index 98% rename from ansys/hps/client/jms/schema/evaluator.py rename to src/ansys/hps/client/jms/schema/evaluator.py index b47576cb..87ca2507 100644 --- a/ansys/hps/client/jms/schema/evaluator.py +++ b/src/ansys/hps/client/jms/schema/evaluator.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing evaluator configuration update schema.""" import marshmallow from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/file.py b/src/ansys/hps/client/jms/schema/file.py similarity index 98% rename from ansys/hps/client/jms/schema/file.py rename to src/ansys/hps/client/jms/schema/file.py index 565f8fff..0cb98ea2 100644 --- a/ansys/hps/client/jms/schema/file.py +++ b/src/ansys/hps/client/jms/schema/file.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing file schema.""" from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/fitness_definition.py b/src/ansys/hps/client/jms/schema/fitness_definition.py similarity index 97% rename from ansys/hps/client/jms/schema/fitness_definition.py rename to src/ansys/hps/client/jms/schema/fitness_definition.py index 2a2780f6..2037e5fc 100644 --- a/ansys/hps/client/jms/schema/fitness_definition.py +++ b/src/ansys/hps/client/jms/schema/fitness_definition.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing fitness term definition schema.""" from marshmallow import fields from marshmallow.validate import OneOf diff --git a/ansys/hps/client/jms/schema/job.py b/src/ansys/hps/client/jms/schema/job.py similarity index 99% rename from ansys/hps/client/jms/schema/job.py rename to src/ansys/hps/client/jms/schema/job.py index 09931201..a00a488d 100644 --- a/ansys/hps/client/jms/schema/job.py +++ b/src/ansys/hps/client/jms/schema/job.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing job schema.""" from marshmallow import fields from marshmallow.validate import OneOf diff --git a/ansys/hps/client/jms/schema/job_definition.py b/src/ansys/hps/client/jms/schema/job_definition.py similarity index 98% rename from ansys/hps/client/jms/schema/job_definition.py rename to src/ansys/hps/client/jms/schema/job_definition.py index da8a7a5b..5d0fa306 100644 --- a/ansys/hps/client/jms/schema/job_definition.py +++ b/src/ansys/hps/client/jms/schema/job_definition.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- +"""Module providing job definition schema.""" import logging from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/license_context.py b/src/ansys/hps/client/jms/schema/license_context.py similarity index 97% rename from ansys/hps/client/jms/schema/license_context.py rename to src/ansys/hps/client/jms/schema/license_context.py index 0ca54beb..67d0d7c2 100644 --- a/ansys/hps/client/jms/schema/license_context.py +++ b/src/ansys/hps/client/jms/schema/license_context.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. # SPDX-License-Identifier: MIT # @@ -20,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing license context schema.""" from marshmallow import fields from ansys.hps.client.common import BaseSchema diff --git a/ansys/hps/client/jms/schema/object_reference.py b/src/ansys/hps/client/jms/schema/object_reference.py similarity index 98% rename from ansys/hps/client/jms/schema/object_reference.py rename to src/ansys/hps/client/jms/schema/object_reference.py index 703577a3..e1a3cb53 100644 --- a/ansys/hps/client/jms/schema/object_reference.py +++ b/src/ansys/hps/client/jms/schema/object_reference.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module retrieving IDs and providing ID references.""" import logging from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/operation.py b/src/ansys/hps/client/jms/schema/operation.py similarity index 97% rename from ansys/hps/client/jms/schema/operation.py rename to src/ansys/hps/client/jms/schema/operation.py index 36798681..fa3f296b 100644 --- a/ansys/hps/client/jms/schema/operation.py +++ b/src/ansys/hps/client/jms/schema/operation.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing operation schema.""" from marshmallow import fields from ansys.hps.client.common import ObjectSchema diff --git a/ansys/hps/client/jms/schema/parameter_definition.py b/src/ansys/hps/client/jms/schema/parameter_definition.py similarity index 98% rename from ansys/hps/client/jms/schema/parameter_definition.py rename to src/ansys/hps/client/jms/schema/parameter_definition.py index 81a35429..4e0a20ec 100644 --- a/ansys/hps/client/jms/schema/parameter_definition.py +++ b/src/ansys/hps/client/jms/schema/parameter_definition.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- +"""Module providing float, integer, boolean, and string parameter definition schema.""" import logging from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/parameter_mapping.py b/src/ansys/hps/client/jms/schema/parameter_mapping.py similarity index 98% rename from ansys/hps/client/jms/schema/parameter_mapping.py rename to src/ansys/hps/client/jms/schema/parameter_mapping.py index 9eb51e97..89ac7e8a 100644 --- a/ansys/hps/client/jms/schema/parameter_mapping.py +++ b/src/ansys/hps/client/jms/schema/parameter_mapping.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing parameter mapping schema.""" import logging from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/permission.py b/src/ansys/hps/client/jms/schema/permission.py similarity index 97% rename from ansys/hps/client/jms/schema/permission.py rename to src/ansys/hps/client/jms/schema/permission.py index bdaf9b1a..4c0fc9d8 100644 --- a/ansys/hps/client/jms/schema/permission.py +++ b/src/ansys/hps/client/jms/schema/permission.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing permission schema.""" from marshmallow import fields from ansys.hps.client.common import BaseSchema diff --git a/ansys/hps/client/jms/schema/project.py b/src/ansys/hps/client/jms/schema/project.py similarity index 98% rename from ansys/hps/client/jms/schema/project.py rename to src/ansys/hps/client/jms/schema/project.py index 2cb80a59..66660308 100644 --- a/ansys/hps/client/jms/schema/project.py +++ b/src/ansys/hps/client/jms/schema/project.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing project schema.""" from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/selection.py b/src/ansys/hps/client/jms/schema/selection.py similarity index 97% rename from ansys/hps/client/jms/schema/selection.py rename to src/ansys/hps/client/jms/schema/selection.py index 4830ab5d..80badd68 100644 --- a/ansys/hps/client/jms/schema/selection.py +++ b/src/ansys/hps/client/jms/schema/selection.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing job selection schema.""" from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/task.py b/src/ansys/hps/client/jms/schema/task.py similarity index 99% rename from ansys/hps/client/jms/schema/task.py rename to src/ansys/hps/client/jms/schema/task.py index 033fe8a5..e9d28f7b 100644 --- a/ansys/hps/client/jms/schema/task.py +++ b/src/ansys/hps/client/jms/schema/task.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- +"""Module providing task schema.""" from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/task_definition.py b/src/ansys/hps/client/jms/schema/task_definition.py similarity index 98% rename from ansys/hps/client/jms/schema/task_definition.py rename to src/ansys/hps/client/jms/schema/task_definition.py index 5537a306..acb1c60f 100644 --- a/ansys/hps/client/jms/schema/task_definition.py +++ b/src/ansys/hps/client/jms/schema/task_definition.py @@ -19,7 +19,8 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing software, HPC resource, resource requirement, success criteria, +licensing, and task definition schema.""" from marshmallow import fields diff --git a/ansys/hps/client/jms/schema/task_definition_template.py b/src/ansys/hps/client/jms/schema/task_definition_template.py similarity index 98% rename from ansys/hps/client/jms/schema/task_definition_template.py rename to src/ansys/hps/client/jms/schema/task_definition_template.py index 4918c3a4..c6005467 100644 --- a/ansys/hps/client/jms/schema/task_definition_template.py +++ b/src/ansys/hps/client/jms/schema/task_definition_template.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. # SPDX-License-Identifier: MIT # @@ -20,7 +19,8 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module providing template property, resource requirements, file, input file, +output file, and task definition schema.""" from marshmallow import fields, validate from ansys.hps.client.common import BaseSchema, ObjectSchema diff --git a/ansys/hps/client/rms/__init__.py b/src/ansys/hps/client/rms/__init__.py similarity index 97% rename from ansys/hps/client/rms/__init__.py rename to src/ansys/hps/client/rms/__init__.py index 4aab3cc6..803c4b58 100644 --- a/ansys/hps/client/rms/__init__.py +++ b/src/ansys/hps/client/rms/__init__.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""PyHPS RMS subpackage.""" from .api.rms_api import RmsApi from .models import ( ComputeResourceSet, diff --git a/ansys/hps/client/rms/api/base.py b/src/ansys/hps/client/rms/api/base.py similarity index 98% rename from ansys/hps/client/rms/api/base.py rename to src/ansys/hps/client/rms/api/base.py index f417e232..da670b23 100644 --- a/ansys/hps/client/rms/api/base.py +++ b/src/ansys/hps/client/rms/api/base.py @@ -19,7 +19,8 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
- +"""Module getting, updating, creating, and deleting objects, and converting objects +to and from JSONs.""" import json import logging from typing import List, Type diff --git a/ansys/hps/client/rms/api/rms_api.py b/src/ansys/hps/client/rms/api/rms_api.py similarity index 99% rename from ansys/hps/client/rms/api/rms_api.py rename to src/ansys/hps/client/rms/api/rms_api.py index de423522..70e514d9 100644 --- a/ansys/hps/client/rms/api/rms_api.py +++ b/src/ansys/hps/client/rms/api/rms_api.py @@ -19,7 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. - +"""Module wrapping around RMS root endpoints.""" import logging from typing import List diff --git a/src/ansys/hps/client/rms/models.py b/src/ansys/hps/client/rms/models.py new file mode 100644 index 00000000..3f542b12 --- /dev/null +++ b/src/ansys/hps/client/rms/models.py @@ -0,0 +1,731 @@ +# Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+"""Module providing model information.""" +# generated by datamodel-codegen: +# filename: rms_openapi.json +# timestamp: 2024-01-15T16:13:17+00:00 + +from __future__ import annotations + +from datetime import datetime +from enum import Enum +from typing import Any, Dict, List, Optional, Union + +from pydantic import BaseModel, Field +from typing_extensions import Literal + + +class ApplicationInfo(BaseModel): + name: str = Field(..., description="Application name", title="Name") + version: str = Field(..., description="Application version", title="Version") + install_path: str = Field( + ..., description="Installation path of application", title="Install Path" + ) + executable: str = Field( + ..., description="Executable path to run application", title="Executable" + ) + environment: Optional[Dict[str, Any]] = Field( + None, description="Environment setup for the process", title="Environment" + ) + capabilities: Optional[List[str]] = Field( + None, description="Capabilities of this application", title="Capabilities" + ) + customization_hook: Optional[Dict[str, Any]] = Field( + None, + description="Details of a custom hook used to modify the configuration before \ + runs are performed", + title="Customization Hook", + ) + + +class EvaluatorTaskDirectoryCleanup(Enum): + always = "always" + on_success = "on_success" + never = "never" + + +class ContextUpdate(BaseModel): + custom: Optional[Dict[str, Optional[Union[int, bool, str, float]]]] = Field( + {}, description="Custom runtime properties.", title="Custom" + ) + + +class CrsCountResponse(BaseModel): + num_compute_resource_sets: Optional[int] = Field(0, title="Num Compute Resource Sets") + + +class TaskDirectoryCleanupEnum(Enum): + always = "always" + on_success = "on_success" + never = "never" + + +class EvaluatorRegistration(BaseModel): + id: Optional[str] = Field(None, description="Unique ID for this worker.", title="Id") + name: Optional[str] = Field( + None, description="User-defined name for this worker.", title="Name" + ) + last_modified: Optional[datetime] = Field( + None, + description="Date and time when the registration was last modified.", + title="Last Modified", + ) + host_id: Optional[str] = Field( + None, description="Static hardware and configuration-based UUID.", title="Host Id" + ) + host_name: Optional[str] = Field( + None, description="Name of the host that the worker is running on.", title="Host Name" + ) + username: Optional[str] = Field( + None, description="Username that the evaluator authenticated with.", title="Username" + ) + platform: Optional[str] = Field( + None, description="OS that the evaluator is running on.", title="Platform" + ) + build_info: Optional[Dict[str, Any]] = Field( + {}, description="Detailed build information.", title="Build Info" + ) + compute_resource_set_id: Optional[str] = Field( + None, + description="ID of the compute resource set that the evaluator belongs to.", + title="Compute Resource Set Id", + ) + change_requested: Optional[datetime] = Field( + None, + description="Date and time of the configuration's last modification request.", + title="Change Requested", + ) + + +class EvaluatorsCountResponse(BaseModel): + num_evaluators: Optional[int] = Field(0, title="Num Evaluators") + + +class EvaluatorsRequest(BaseModel): + evaluators: List[EvaluatorRegistration] = Field( + ..., description="Evaluator details", title="Evaluators" + ) + + +class EvaluatorsResponse(BaseModel): + evaluators: List[EvaluatorRegistration] = Field( + ..., description="Evaluator details", title="Evaluators" + ) + 
+ +class HpcResources(BaseModel): + num_cores_per_node: Optional[int] = Field( + None, description="Number of cores per node.", title="Num Cores Per Node" + ) + num_gpus_per_node: Optional[int] = Field( + None, description="Number of GPUs per node.", title="Num Gpus Per Node" + ) + exclusive: Optional[bool] = Field( + None, description="Whether to avoid sharing nodes with other running jobs.", title="Exclusive" + ) + queue: Optional[str] = Field(None, description="Scheduler's queue.", title="Queue") + + +class KubernetesKedaBackend(BaseModel): + plugin_name: Literal["kubernetes"] = Field(..., title="Plugin Name") + debug: Optional[bool] = Field( + False, description="Enable additional debugging of the backend", title="Debug" + ) + job_script_template_path: Optional[str] = Field( + None, + description="Path to the job script template to use in the backend", + title="Job Script Template Path", + ) + working_dir: Optional[str] = Field( + None, description="Working directory to use in the backend", title="Working Dir" + ) + env: Optional[Dict[str, Any]] = Field( + None, description="Static environment variables needed for job execution", title="Env" + ) + cpu_limit: Optional[str] = Field( + "1.0", description="CPU limit applied to each evaluator instance", title="Cpu Limit" + ) + memory_limit: Optional[str] = Field( + "250M", description="Memory limit applied to each evaluator instance", title="Memory Limit" + ) + namespace: Optional[str] = Field( + "default", + description="Kubernetes namespace to use to scale evaluators", + title="Namespace", + ) + target_resource_kind: Optional[str] = Field( + "job", + description="Kubernetes resource kind that the REP scaler should scale. Must be one of \ + job, deployment, or statefulset", + title="Target Resource Kind", + ) + + +class KubernetesResourceScaling(BaseModel): + plugin_name: Literal["kubernetes_resource_scaling"] = Field(..., title="Plugin Name") + target_resource_kind: Optional[str] = Field( + "job", + description="Kubernetes resource kind that the REP scaler should scale. Must be one of \ + job, deployment, or statefulset", + title="Target Resource Kind", + ) + + +class LocalBackend(BaseModel): + plugin_name: Literal["local"] = Field(..., title="Plugin Name") + debug: Optional[bool] = Field( + False, description="Enable additional debugging of the backend", title="Debug" + ) + working_dir: Optional[str] = Field( + None, description="Working directory to use in the backend", title="Working Dir" + ) + env: Optional[Dict[str, Any]] = Field( + None, description="Static environment variables needed for job execution", title="Env" + ) + + +class Machine(BaseModel): + name: str = Field(..., description="Name of the machine", title="Name") + num_cores: int = Field(..., description="Number of cores available", title="Num Cores") + + +class MaxAvailableResourceScaling(BaseModel): + plugin_name: Literal["max_available_resource_scaling"] = Field(..., title="Plugin Name") + match_all_requirements: Optional[bool] = Field( + False, + description="Whether scaling should work with available resource properties specified in \ + the compute resource set (default) or require a match of all requirements of the \ + task definition.", + title="Match All Requirements", + ) + + +class MockupBackend(BaseModel): + plugin_name: Literal["mockup"] = Field(..., title="Plugin Name") + debug: Optional[bool] = Field( + False, description="Enable additional debugging of the backend", title="Debug" + ) + + +class Node(BaseModel): + name: Optional[str] = Field(None, description="Node name",
title="Name") + total_memory_mb: Optional[int] = Field(..., description="Total memory", title="Total Memory Mb") + total_cores: Optional[int] = Field(..., description="Number of cores", title="Total Cores") + additional_props: Optional[Dict[str, Any]] = Field({}, title="Additional Props") + + +class NodeGroup(BaseModel): + node_names: List[str] = Field(..., title="Node Names") + memory_per_node_mb: Optional[int] = Field( + ..., description="Total Memory per node", title="Memory Per Node Mb" + ) + cores_per_node: Optional[int] = Field( + ..., description="Total cores per node", title="Cores Per Node" + ) + + +class PlatformEnum(Enum): + windows = "windows" + linux = "linux" + darwin = "darwin" + + +class ProblemDetail(BaseModel): + type: Optional[str] = Field(None, title="Type") + title: Optional[str] = Field(None, title="Title") + status: int = Field(..., title="Status") + detail: str = Field(..., title="Detail") + instance: Optional[str] = Field(None, title="Instance") + + +class ProcessLauncherProcessRunner(BaseModel): + plugin_name: Literal["process_launcher_module"] = Field(..., title="Plugin Name") + default_user: Optional[str] = Field( + None, description="The user to use when none is specified", title="Default User" + ) + timeout: Optional[int] = Field( + 30, description="Timeout in seconds before the request is aborted.", title="Timeout" + ) + allowed_users: Optional[List[str]] = Field( + None, description="Users allowed to launch processes", title="Allowed Users" + ) + disallowed_users: Optional[List[str]] = Field( + ["root"], description="Users not allowed to launch processes", title="Disallowed Users" + ) + user_mapping: Optional[Dict[str, str]] = Field( + {}, description="Map of calling user to system user", title="User Mapping" + ) + minimum_uid: Optional[int] = Field( + 1000, description="Minimum UID of users allowed to launch processes", title="Minimum Uid" + ) + minimum_gid: Optional[int] = Field( + 1000, description="Minimum GID of users allowed to launch processes", title="Minimum Gid" + ) + + +class Queue(BaseModel): + name: Optional[str] = Field(None, description="Queue name", title="Name") + node_groups: Optional[List[NodeGroup]] = Field( + None, + description="List of node groups associated with the queue (if available)", + title="Node Groups", + ) + additional_props: Optional[Dict[str, Any]] = Field({}, title="Additional Props") + + +class Resources(BaseModel): + num_cores: Optional[int] = Field(None, description="Number of cores", title="Num Cores") + platform: Optional[PlatformEnum] = Field( + None, description="Basic platform information: 'windows' or 'linux'" + ) + memory: Optional[int] = Field(None, description="Amount of RAM in bytes", title="Memory") + disk_space: Optional[int] = Field( + None, description="Amount of disk space in bytes", title="Disk Space" + ) + custom: Optional[Dict[str, Optional[Union[bool, int, str, float]]]] = Field( + {}, description="Custom resource properties.", title="Custom" + ) + num_instances: Optional[int] = Field( + None, + description="Number of instances/jobs that can be created on the compute resource set", + title="Num Instances", + ) + + +class RestLauncherProcessRunner(BaseModel): + plugin_name: Literal["process_launcher_service"] = Field(..., title="Plugin Name") + launcher_url: Optional[str] = Field( + "http://localhost:4911", + description="URL to use when none is specified", + title="Launcher Url", + ) + verify_ssl: Optional[bool] = Field( + True, description="Check the SSL certificate for HTTPS launchers", 
title="Verify Ssl" + ) + timeout: Optional[int] = Field( + 30, description="Timeout in seconds before the request is aborted.", title="Timeout" + ) + shell: Optional[bool] = Field( + True, description="Enable the shell interpretation on subprocess run", title="Shell" + ) + + +class ScalerApplicationInfo(BaseModel): + name: str = Field(..., description="Application name", title="Name") + version: str = Field(..., description="Application version", title="Version") + install_path: str = Field( + ..., description="Installation path of application", title="Install Path" + ) + executable: str = Field( + ..., description="Executable path to run application", title="Executable" + ) + environment: Optional[Dict[str, Any]] = Field( + None, description="Environment setup for the process", title="Environment" + ) + capabilities: Optional[List[str]] = Field( + None, description="Capabilities of the application", title="Capabilities" + ) + customization_hook: Optional[Dict[str, Any]] = Field( + None, + description="Details of a custom hook used to modify the configuration before \ + runs are performed", + title="Customization Hook", + ) + resource_name: Optional[str] = Field( + None, + description="Kubernetes object (deployment/statefulset) name to use as target resource \ + by keda", + title="Resource Name", + ) + scaling_max_eval_instances: Optional[int] = Field( + 1, + description="Maximum number of instances that can be created when scaling up", + title="Scaling Max Eval Instances", + ) + scaling_min_eval_instances: Optional[int] = Field( + 0, + description="Minimum number of instances than can be terminated when scaling down", + title="Scaling Min Eval Instances", + ) + scaling_threshold: Optional[int] = Field( + 1, + description="Threshold value to determine when Kubernetes deployments should be \ + scaled up or down", + title="Scaling Threshold", + ) + cool_down_period: Optional[int] = Field( + 60, + description="Period to wait after the last trigger reported active before \ + scaling the resource back to 0", + title="Cool Down Period", + ) + + +class ScalerRegistration(BaseModel): + id: Optional[str] = Field(None, description="Unique ID for this worker", title="Id") + name: Optional[str] = Field( + None, description="User-defined name for this worker.", title="Name" + ) + last_modified: Optional[datetime] = Field( + None, + description="Date and time when the registration was last modified.", + title="Last Modified", + ) + host_id: Optional[str] = Field( + None, description="Static hardware and configuration-based UUID.", title="Host Id" + ) + host_name: Optional[str] = Field( + None, description="Name of the host on which the worker is running", title="Host Name" + ) + username: Optional[str] = Field( + None, description="Username that the evaluator authenticated with.", title="Username" + ) + platform: Optional[str] = Field( + None, description="OS that the evaluator is running on.", title="Platform" + ) + build_info: Optional[Dict[str, Any]] = Field( + {}, description="Detailed build information.", title="Build Info" + ) + config_modified: Optional[datetime] = Field( + None, + description="Date and time of the configuration's last modification", + title="Config Modified", + ) + + +class ScalersCountResponse(BaseModel): + num_scalers: Optional[int] = Field(0, title="Num Scalers") + + +class ScalersRequest(BaseModel): + scalers: List[ScalerRegistration] = Field(..., description="Scaler details", title="Scalers") + + +class ScalersResponse(BaseModel): + scalers: List[ScalerRegistration] = 
Field(..., description="Scaler details", title="Scalers") + + +class ServiceUserProcessRunner(BaseModel): + plugin_name: Literal["service_user_module"] = Field(..., title="Plugin Name") + + +class Status(BaseModel): + time: str = Field(..., title="Time") + build: Dict[str, Any] = Field(..., title="Build") + + +class ClusterInfo(BaseModel): + id: Optional[str] = Field(None, description="Unique ID for database", title="Id") + crs_id: Optional[str] = Field(None, description="Compute resource set ID", title="Crs Id") + name: Optional[str] = Field(None, description="Cluster name", title="Name") + queues: Optional[List[Queue]] = Field([], title="Queues") + nodes: Optional[List[Node]] = Field([], title="Nodes") + additional_props: Optional[Dict[str, Dict[str, Any]]] = Field({}, title="Additional Props") + + +class Context(BaseModel): + custom: Optional[Dict[str, Optional[Union[int, bool, str, float]]]] = Field( + {}, description="Custom runtime properties.", title="Custom" + ) + machines_list: Optional[List[Machine]] = Field( + None, + description="List of machines for distributed parallel processing.", + title="Machines List", + ) + + +class EvaluatorResources(BaseModel): + num_cores: Optional[int] = Field(None, description="Number of cores", title="Num Cores") + platform: Optional[PlatformEnum] = Field( + None, description="Basic platform information: 'windows' or 'linux'" + ) + memory: Optional[int] = Field(None, description="Amount of RAM in bytes", title="Memory") + disk_space: Optional[int] = Field( + None, description="Amount of disk space in bytes", title="Disk Space" + ) + custom: Optional[Dict[str, Optional[Union[bool, int, str, float]]]] = Field( + {}, description="Custom resource properties.", title="Custom" + ) + hpc_resources: Optional[HpcResources] = None + + +class OrchestrationInterfacesBackend(BaseModel): + plugin_name: Literal["orchestration_interfaces"] = Field(..., title="Plugin Name") + debug: Optional[bool] = Field( + False, description="Enable additional debugging of the backend", title="Debug" + ) + scheduler_type: Optional[str] = Field( + "slurm", + description="Job scheduler type to use in the backend (slurm, pbs, uge...)", + title="Scheduler Type", + ) + scheduler_queue_default: Optional[str] = Field( + None, + description="Job scheduler queue to use for submission", + title="Scheduler Queue Default", + ) + scheduler_command_override: Optional[str] = Field( + None, + description="Path to the JSON file with custom scheduler command definitions", + title="Scheduler Command Override", + ) + scheduler_script_override: Optional[str] = Field( + None, + description="Path to the shell script to template for the scheduler", + title="Scheduler Script Override", + ) + exclusive_default: Optional[bool] = Field( + False, + description="Request the scheduler to hold the nodes exclusively for one request", + title="Exclusive Default", + ) + distributed_default: Optional[bool] = Field( + True, + description="Allow the scheduler to provide multiple machines to fulfill the request", + title="Distributed Default", + ) + num_cores_default: Optional[int] = Field( + 1, + description="Number of cores to request from the scheduler for a task", + title="Num Cores Default", + ) + working_dir: Optional[str] = Field( + None, description="Working directory to use in the backend", title="Working Dir" + ) + env: Optional[Dict[str, Any]] = Field( + None, description="Static environment variables needed for job execution", title="Env" + ) + process_runner: Optional[ + Union[ServiceUserProcessRunner, 
ProcessLauncherProcessRunner, RestLauncherProcessRunner] + ] = Field( + {"plugin_name": "service_user_module"}, + description="Process runner used to execute commands", + discriminator="plugin_name", + title="Process Runner", + ) + create_workdir: Optional[bool] = Field( + True, + description="Create base and/or user-specific working directories at runtime", + title="Create Workdir", + ) + use_templates: Optional[bool] = Field( + True, + description="Use the templated versions of the scripts and write them to the \ + working directory", + title="Use Templates", + ) + + +class ComputeResourceSet(BaseModel): + name: Optional[str] = Field( + "default", description="Name of the compute resource set.", title="Name" + ) + id: Optional[str] = Field(None, description="Unique ID for this set.", title="Id") + scaler_id: Optional[str] = Field( + None, + description="Temporary. To be removed after transitioning to ``client_id``.", + title="Scaler Id", + ) + last_modified: Optional[datetime] = Field( + None, description="Last modified time.", title="Last Modified" + ) + backend: Optional[ + Union[KubernetesKedaBackend, OrchestrationInterfacesBackend, LocalBackend, MockupBackend] + ] = Field( + {"debug": False, "plugin_name": "local"}, + description="Backend to use in this compute resource set.", + discriminator="plugin_name", + title="Backend", + ) + scaling_strategy: Optional[ + Union[MaxAvailableResourceScaling, KubernetesResourceScaling] + ] = Field( + {"match_all_requirements": False, "plugin_name": "max_available_resource_scaling"}, + description="Scaling strategy to use in this compute resource set.", + discriminator="plugin_name", + title="Scaling Strategy", + ) + available_resources: Optional[Resources] = Field( + {"custom": {}}, description="Available resources in the compute resource set." 
+ ) + available_applications: Optional[List[ScalerApplicationInfo]] = Field( + [], description="List of available applications.", title="Available Applications" + ) + evaluator_requirements_matching: Optional[bool] = Field( + False, + description="Whether the evaluators should do matching of resource and \ + software requirements.", + title="Evaluator Requirements Matching", + ) + evaluator_task_directory_cleanup: Optional[EvaluatorTaskDirectoryCleanup] = Field( + "always", + description="Cleanup policy for task directories that are passed to evaluators.", + title="Evaluator Task Directory Cleanup", + ) + evaluator_auto_shutdown_time: Optional[int] = Field( + 20, + description="Time after which to shut down the evaluator if not running any jobs.", + title="Evaluator Auto Shutdown Time", + ) + evaluator_loop_interval: Optional[int] = Field( + 5, + description="Main evaluator loop is repeated every ``loop_interval`` seconds.", + title="Evaluator Loop Interval", + ) + + +class ComputeResourceSetsRequest(BaseModel): + compute_resource_sets: List[ComputeResourceSet] = Field( + ..., description="Compute resource set details", title="Compute Resource Sets" + ) + + +class ComputeResourceSetsResponse(BaseModel): + compute_resource_sets: List[ComputeResourceSet] = Field( + ..., description="Compute resource set details", title="Compute Resource Sets" + ) + + +class EvaluatorConfiguration(BaseModel): + id: Optional[str] = Field(None, description="Unique DB ID (read-only)", title="Id") + evaluator_id: Optional[str] = Field( + None, description="ID of the parent evaluator (read-only).", title="Evaluator Id" + ) + last_modified: Optional[datetime] = Field( + None, description="Last modified time.", title="Last Modified" + ) + working_directory: Optional[str] = Field(None, title="Working Directory") + local_file_cache_max_size: Optional[int] = Field( + None, + description="Maximum allowed cache size in bytes or ``None``.", + title="Local File Cache Max Size", + ) + max_num_parallel_tasks: Optional[int] = Field(None, title="Max Num Parallel Tasks") + task_directory_cleanup: Optional[TaskDirectoryCleanupEnum] = Field( + None, title="Task Directory Cleanup" + ) + resources: Optional[EvaluatorResources] = {"custom": {}} + task_manager_type: Optional[str] = Field(None, title="Task Manager Type") + loop_interval: Optional[float] = Field( + 5.0, + description="Main evaluator loop is repeated every ``loop_interval`` seconds.", + title="Loop Interval", + ) + local_file_cache: Optional[bool] = Field( + True, + description="Whether to configure a local file cache in the file tool.", + title="Local File Cache", + ) + applications: Optional[List[ApplicationInfo]] = Field( + [], description="List of available applications.", title="Applications" + ) + project_server_select: Optional[bool] = Field( + True, + description="Get project assignments from the server instead of using the \ + locally set values.", + title="Project Server Select", + ) + project_list: Optional[List[str]] = Field( + [], + description="IDs of the projects that the evaluator should work on in order.", + title="Project List", + ) + project_assignment_mode: Optional[str] = Field( + "all_active", + description="How the evaluator is to select projects to work on. \ + Options are ``all_active``, ``disabled``, and ``list``.", + title="Project Assignment Mode", + ) + context: Optional[Context] = Field( + {"custom": {}}, description="Runtime properties to pass to executed tasks." 
+ ) + + +class EvaluatorConfigurationUpdate(BaseModel): + id: Optional[str] = Field(None, description="Unique DB ID (read-only)", title="Id") + evaluator_id: Optional[str] = Field( + None, description="ID of the parent evaluator (read-only).", title="Evaluator Id" + ) + last_modified: Optional[datetime] = Field( + None, description="Last modified time.", title="Last Modified" + ) + working_directory: Optional[str] = Field(None, title="Working Directory") + local_file_cache_max_size: Optional[int] = Field( + None, + description="Maximum allowed cache size in bytes or ``None``.", + title="Local File Cache Max Size", + ) + max_num_parallel_tasks: Optional[int] = Field(None, title="Max Num Parallel Tasks") + task_directory_cleanup: Optional[TaskDirectoryCleanupEnum] = Field( + None, title="Task Directory Cleanup" + ) + resources: Optional[EvaluatorResources] = {"custom": {}} + name: Optional[str] = Field( + None, + description="Update the name of the evaluator (updating the registration).", + title="Name", + ) + loop_interval: Optional[float] = Field( + None, + description="Main evaluator loop is repeated every ``loop_interval`` seconds.", + title="Loop Interval", + ) + local_file_cache: Optional[bool] = Field( + None, + description="Whether to configure a local file cache in the file tool.", + title="Local File Cache", + ) + applications: Optional[List[ApplicationInfo]] = Field( + [], description="List of available applications.", title="Applications" + ) + project_list: Optional[List[str]] = Field( + None, + description="IDs of the projects that the evaluator should work on in order.", + title="Project List", + ) + project_assignment_mode: Optional[str] = Field( + None, + description="How the evaluator selects projects to work on. \ + One of: disabled, all_active, list", + title="Project Assignment Mode", + ) + context: Optional[ContextUpdate] = Field( + {"custom": {}}, description="Runtime properties to pass to executed tasks." + ) + + +class EvaluatorConfigurationUpdatesRequest(BaseModel): + configuration_updates: List[EvaluatorConfigurationUpdate] = Field( + ..., description="Configuration update details", title="Configuration Updates" + ) + + +class EvaluatorConfigurationUpdatesResponse(BaseModel): + configuration_updates: List[EvaluatorConfigurationUpdate] = Field( + ..., description="Configuration update details", title="Configuration Updates" + ) + + +class EvaluatorConfigurationsResponse(BaseModel): + configurations: List[EvaluatorConfiguration] = Field( + ..., description="Evaluator configurations", title="Configurations" + ) diff --git a/ansys/hps/client/warnings.py b/src/ansys/hps/client/warnings.py similarity index 95% rename from ansys/hps/client/warnings.py rename to src/ansys/hps/client/warnings.py index be44c68e..f9abdabe 100644 --- a/ansys/hps/client/warnings.py +++ b/src/ansys/hps/client/warnings.py @@ -19,6 +19,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Module providing unverified HTTPS request warnings.""" class UnverifiedHTTPSRequestsWarning(Warning): diff --git a/tests/jms/test_project_permissions.py b/tests/jms/test_project_permissions.py index c914180f..3e2b5259 100644 --- a/tests/jms/test_project_permissions.py +++ b/tests/jms/test_project_permissions.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. 
# SPDX-License-Identifier: MIT # diff --git a/tests/jms/test_task_definition_templates.py b/tests/jms/test_task_definition_templates.py index 6d253bf2..a6d9bf39 100644 --- a/tests/jms/test_task_definition_templates.py +++ b/tests/jms/test_task_definition_templates.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. # SPDX-License-Identifier: MIT # diff --git a/tests/jms/test_task_files.py b/tests/jms/test_task_files.py index be7ed8b7..e61f4838 100644 --- a/tests/jms/test_task_files.py +++ b/tests/jms/test_task_files.py @@ -1,4 +1,3 @@ -# Copyright (C) 2021 by # Copyright (C) 2024 ANSYS, Inc. and/or its affiliates. # SPDX-License-Identifier: MIT # diff --git a/tests/test_examples.py b/tests/test_examples.py index 92360673..8f47660c 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -256,7 +256,7 @@ def test_cfx_static_mixer(self): from examples.cfx_static_mixer.project_setup import create_project project = create_project( - self.client, name="CFX Static Mixer Test", num_jobs=1, version=ansys_version + self.client, name="CFX static mixer test", num_jobs=1, version=ansys_version ) self.assertIsNotNone(project) @@ -264,7 +264,7 @@ def test_cfx_static_mixer(self): project_api = ProjectApi(self.client, project.id) self.assertEqual(len(project_api.get_jobs()), 1) - self.assertEqual(jms_api.get_project(id=project.id).name, "CFX Static Mixer Test") + self.assertEqual(jms_api.get_project(id=project.id).name, "CFX static mixer test") jms_api.delete_project(project) diff --git a/tox.ini b/tox.ini index 3e5d077b..08db9c9d 100644 --- a/tox.ini +++ b/tox.ini @@ -18,11 +18,11 @@ setenv = PYTHONUNBUFFERED = yes noeval: PYTEST_MARKERS = -m "not requires_evaluator" witheval: PYTEST_MARKERS = -m "requires_evaluator" - coverage: PYTEST_EXTRA_ARGS = --cov=ansys.rep --cov-report=term --cov-report=xml --cov-report=html + coverage: PYTEST_EXTRA_ARGS = -ra -s --durations=0 -p pytest_cov --cov=ansys.hps --cov-report html:.cov/html --cov-report xml:.cov/xml --cov-report term deps = - -r{toxinidir}/requirements/requirements_tests.txt + -e .[tests] commands = - pytest {env:PYTEST_MARKERS:} {env:PYTEST_EXTRA_ARGS:} --junitxml test_results-{envname}.xml {posargs:-vv} + coverage run -m pytest {env:PYTEST_MARKERS:} {env:PYTEST_EXTRA_ARGS:} --junitxml test_results-{envname}.xml {posargs:-vv --cov-append} [testenv:style] description = Checks project code style @@ -36,7 +36,9 @@ commands = [testenv:doc] description = Check if documentation generates properly deps = - -r{toxinidir}/requirements/requirements_doc.txt + -e .[doc] +allowlist_externals = make commands = python archive_examples.py - python -m sphinx -b html doc/source build/sphinx/html + make -C doc html + make -C doc pdf \ No newline at end of file
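Reviewer note (not part of the diff): a minimal sketch of how the regenerated RMS models added in src/ansys/hps/client/rms/models.py might be exercised after the move to the src layout. It assumes the package is installed from this branch (for example with "pip install -e ."), the field values are invented for illustration, and the serialization helpers available on the models depend on the pydantic major version the project resolves (.dict()/.json() in pydantic v1 versus .model_dump()/.model_dump_json() in v2).

# Sketch only: construct a compute resource set from the models generated from rms_openapi.json.
from ansys.hps.client.rms.models import ComputeResourceSet, LocalBackend

crs = ComputeResourceSet(
    name="local-workstation",  # hypothetical name, the model default is "default"
    backend=LocalBackend(plugin_name="local", debug=True),
)

# The discriminated union on ``backend`` resolves to the LocalBackend variant.
print(crs.name)                 # -> "local-workstation"
print(crs.backend.plugin_name)  # -> "local"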