diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
index 24d7700c5..d7152ea3e 100644
--- a/.github/ISSUE_TEMPLATE/bug.yml
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -61,6 +61,10 @@ body:
label: Which DPF/Ansys version are you using?
multiple: false
options:
+ - 'DPF Server 2025.1.pre0'
+ - 'Ansys 2024 R2'
+ - 'DPF Server 2024.2.pre1'
+ - 'DPF Server 2024.2.pre0'
- 'Ansys 2024 R1'
- 'DPF Server 2024.1.pre0'
- 'Ansys 2023 R2'
diff --git a/.github/release.yml b/.github/release.yml
new file mode 100644
index 000000000..284eba4f5
--- /dev/null
+++ b/.github/release.yml
@@ -0,0 +1,24 @@
+changelog:
+ exclude:
+ authors:
+ - dependabot[bot]
+ categories:
+ - title: Enhancements
+ labels:
+ - enhancement
+ - title: Bug fixes
+ labels:
+ - bug
+ - title: Documentation
+ labels:
+ - documentation
+ - title: Examples
+ labels:
+ - examples
+ - title: CI/CD
+ labels:
+ - CI/CD
+ - title: Maintenance
+ labels:
+ - maintenance
+ - dependencies
diff --git a/.github/workflows/ansys_lab.yml b/.github/workflows/ansys_lab.yml
index 1fafed358..f69adffcc 100644
--- a/.github/workflows/ansys_lab.yml
+++ b/.github/workflows/ansys_lab.yml
@@ -5,14 +5,14 @@ on:
workflow_call:
inputs:
version:
- description: "Release version as 'X.Y.Z'"
+ description: "Release version as 'vX.Y.Z'"
type: string
required: true
# Can be called manually
workflow_dispatch:
inputs:
version:
- description: "Release version as 'X.Y.Z'"
+ description: "Release version as 'vX.Y.Z'"
type: string
required: true
@@ -39,11 +39,11 @@ jobs:
print(f"Deleted {example}")
- name: "Download Release Asset - HTML"
- uses: dsaltares/fetch-gh-release-asset@1.1.1
+ uses: dsaltares/fetch-gh-release-asset@1.1.2
with:
file: HTML-doc-ansys-dpf-post.zip
token: ${{ secrets.GITHUB_TOKEN }}
- version: tags/v${{ inputs.version }}
+ version: tags/${{ inputs.version }}
- name: "Extract ipynb examples"
shell: python
@@ -69,8 +69,8 @@ jobs:
- name: "Push changes"
shell: bash
run: |
- git config --global user.name "rlagha"
+ git config --global user.name "pyansys-ci-bot"
git add .
git status
- git commit -a -m ${{ inputs.version }}
+ git commit -a -m ${{ inputs.version }} || exit 0
git push https://${{ secrets.PYANSYS_CI_BOT_TOKEN }}@github.com/ansys/pydpf-post.git --follow-tags
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 69f72e099..77d2cfc02 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -25,7 +25,7 @@ env:
MODULE: 'post'
DOCUMENTATION_CNAME: 'post.docs.pyansys.com'
MAIN_PYTHON_VERSION: '3.10'
- ANSYS_VERSION: '242'
+ ANSYS_VERSION: '252'
jobs:
debug:
@@ -43,7 +43,7 @@ jobs:
- uses: actions/checkout@v4
- name: "Setup Python"
- uses: actions/setup-python@v5.0.0
+ uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.MAIN_PYTHON_VERSION }}
@@ -81,7 +81,8 @@ jobs:
MODULE: ${{env.MODULE}}
dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}}
install_extras: plotting
- wheel: true
+ # Upload the wheel artifact for only one of the OS as it is OS-agnostic
+ wheel: ${{ (matrix.python-version == env.MAIN_PYTHON_VERSION) && (matrix.os == 'windows-latest') }}
wheelhouse: true
standalone_suffix: ${{ inputs.standalone_suffix || ''}}
@@ -96,6 +97,7 @@ jobs:
- name: "Test Docstrings"
uses: ansys/pydpf-actions/test_docstrings@v2.3
+ if: ${{ matrix.python-version == env.MAIN_PYTHON_VERSION }}
with:
MODULE: ${{env.MODULE}}
PACKAGE_NAME: ${{env.PACKAGE_NAME}}
@@ -118,7 +120,7 @@ jobs:
path: tests/junit/test-results.xml
- name: "Upload coverage to Codecov"
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v5
retro:
name: "Retro-compatibility"
@@ -129,7 +131,7 @@ jobs:
matrix:
python-version: ["3.10"]
os: ["windows-latest", "ubuntu-latest"]
- ANSYS_VERSION: ["241", "232", "231", "222"]
+ ANSYS_VERSION: ["251", "242", "241", "232", "231", "222"]
steps:
- uses: actions/checkout@v4
@@ -180,13 +182,13 @@ jobs:
if: always()
- name: "Upload coverage to Codecov"
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v5
examples:
if: startsWith(github.head_ref, 'master') || github.event.action == 'ready_for_review' || !github.event.pull_request.draft
uses: ./.github/workflows/examples.yml
with:
- ANSYS_VERSION: "242"
+ ANSYS_VERSION: "252"
python_versions: '["3.10"]'
standalone_suffix: ${{ inputs.standalone_suffix || ''}}
secrets: inherit
@@ -195,7 +197,7 @@ jobs:
if: startsWith(github.head_ref, 'master') || github.event.action == 'ready_for_review' || !github.event.pull_request.draft
uses: ./.github/workflows/docs.yml
with:
- ANSYS_VERSION: "242"
+ ANSYS_VERSION: "252"
python_version: "3.10"
standalone_suffix: ${{ inputs.standalone_suffix || ''}}
event_name: ${{ github.event_name }}
@@ -207,9 +209,11 @@ jobs:
needs: [docs]
steps:
- name: "Upload development documentation"
- uses: ansys/actions/doc-deploy-dev@v5
+ uses: ansys/actions/doc-deploy-dev@v8
with:
cname: ${{ env.DOCUMENTATION_CNAME }}
- token: ${{ secrets.GITHUB_TOKEN }}
+ token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }}
+ bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }}
+ bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }}
doc-artifact-name: HTML-doc-ansys-dpf-post.zip
decompress-artifact: true
diff --git a/.github/workflows/ci_release.yml b/.github/workflows/ci_release.yml
index 8c0ac6806..a14b698ac 100644
--- a/.github/workflows/ci_release.yml
+++ b/.github/workflows/ci_release.yml
@@ -11,7 +11,7 @@ on:
standalone_branch_suffix:
description: 'Suffix of the branch on standalone'
required: false
- default: '.sp01'
+ default: '.pre0'
#┌───────────── minute (0 - 59)
#│ ┌───────────── hour (0 - 23)
@@ -28,7 +28,7 @@ env:
MODULE: 'post'
DOCUMENTATION_CNAME: 'post.docs.pyansys.com'
MAIN_PYTHON_VERSION: '3.10'
- ANSYS_VERSION: '241'
+ ANSYS_VERSION: '251'
jobs:
debug:
@@ -46,7 +46,7 @@ jobs:
- uses: actions/checkout@v4
- name: "Setup Python"
- uses: actions/setup-python@v5.0.0
+ uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.MAIN_PYTHON_VERSION }}
@@ -62,7 +62,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- ANSYS_VERSION: ["241"]
+ ANSYS_VERSION: ["251"]
python-version: ["3.9", "3.10", "3.11"]
os: ["windows-latest", "ubuntu-latest"]
@@ -85,9 +85,10 @@ jobs:
MODULE: ${{env.MODULE}}
dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}}
install_extras: plotting
- wheel: true
+ # Upload the wheel artifact for only one of the OS as it is OS-agnostic
+ wheel: ${{ (matrix.python-version == env.MAIN_PYTHON_VERSION) && (matrix.os == 'windows-latest') }}
wheelhouse: true
- standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.sp01' }}
+ standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.pre0' }}
- name: "Prepare Testing Environment"
uses: ansys/pydpf-actions/prepare_tests@v2.3
@@ -122,7 +123,7 @@ jobs:
path: tests/junit/test-results.xml
- name: "Upload coverage to Codecov"
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v5
retro:
name: "Retro-compatibility"
@@ -132,7 +133,7 @@ jobs:
matrix:
python-version: ["3.10"]
os: ["windows-latest", "ubuntu-latest"]
- ANSYS_VERSION: ["232", "231", "222"]
+ ANSYS_VERSION: ["242", "241", "232", "231", "222"]
steps:
- uses: actions/checkout@v4
@@ -183,22 +184,22 @@ jobs:
if: always()
- name: "Upload coverage to Codecov"
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v5
examples:
uses: ./.github/workflows/examples.yml
with:
- ANSYS_VERSION: '241'
+ ANSYS_VERSION: '251'
python_versions: '["3.10"]'
- standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.sp01' }}
+ standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.pre0' }}
secrets: inherit
docs:
uses: ./.github/workflows/docs.yml
with:
- ANSYS_VERSION: '241'
+ ANSYS_VERSION: '251'
python_version: "3.10"
- standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.sp01' }}
+ standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.pre0' }}
event_name: ${{ github.event_name }}
secrets: inherit
@@ -215,7 +216,7 @@ jobs:
run: ls -R
- name: "Create draft release"
- uses: softprops/action-gh-release@v1
+ uses: softprops/action-gh-release@v2
with:
files: |
./**/*.whl
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 370488ffc..395d163fa 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -11,7 +11,7 @@ on:
ANSYS_VERSION:
required: false
type: string
- default: "242"
+ default: "252"
standalone_suffix:
description: "Suffix of the branch on standalone"
required: false
@@ -42,7 +42,7 @@ on:
description: "ANSYS version"
required: true
type: string
- default: "242"
+ default: "252"
standalone_suffix:
description: "Suffix of the branch on standalone"
required: false
@@ -63,7 +63,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: "Running documentation style checks"
- uses: ansys/actions/doc-style@v5.0
+ uses: ansys/actions/doc-style@v8
with:
token: ${{ secrets.GITHUB_TOKEN }}
@@ -81,7 +81,7 @@ jobs:
echo "ANSYSLMD_LICENSE_FILE=1055@${{ secrets.LICENSE_SERVER }}" >> $GITHUB_ENV
- name: Setup Python
- uses: actions/setup-python@v5.0.0
+ uses: actions/setup-python@v5.3.0
with:
python-version: ${{ inputs.python_version }}
@@ -165,7 +165,7 @@ jobs:
if: always()
- name: "Zip HTML Documentation"
- uses: vimtor/action-zip@v1.1
+ uses: vimtor/action-zip@v1.2
with:
files: doc/build/html/
dest: HTML-doc-${{env.PACKAGE_NAME}}.zip
@@ -190,7 +190,7 @@ jobs:
echo "Found PDF doc: ${files[0]}"
- name: "Zip PDF Documentation"
- uses: vimtor/action-zip@v1.1
+ uses: vimtor/action-zip@v1.2
if: ${{ inputs.generate_pdf == 'true' }}
with:
files: doc/build/latex/${{ steps.pdf.outputs.PDF_file }}
diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml
index 21a4d7551..d87f6b8f3 100644
--- a/.github/workflows/examples.yml
+++ b/.github/workflows/examples.yml
@@ -11,7 +11,7 @@ on:
ANSYS_VERSION:
required: false
type: string
- default: "242"
+ default: "252"
standalone_suffix:
description: "Suffix of the branch on standalone"
required: false
@@ -29,7 +29,7 @@ on:
description: "ANSYS version to run."
required: true
type: string
- default: "242"
+ default: "252"
standalone_suffix:
description: "Suffix of the branch on standalone"
required: false
@@ -72,7 +72,7 @@ jobs:
echo "ANSYSLMD_LICENSE_FILE=1055@${{ secrets.LICENSE_SERVER }}" >> $GITHUB_ENV
- name: Setup Python
- uses: actions/setup-python@v5.0.0
+ uses: actions/setup-python@v5.3.0
with:
python-version: ${{ matrix.python-version }}
diff --git a/.github/workflows/releaser.yml b/.github/workflows/releaser.yml
index b4f99f934..238305af1 100644
--- a/.github/workflows/releaser.yml
+++ b/.github/workflows/releaser.yml
@@ -3,25 +3,58 @@ name: Release
on:
release:
types: [published]
+# Can be called manually
+ workflow_dispatch:
+ inputs:
+ release_tag:
+ description: 'Release tag'
+ default: 'latest'
+ type: string
env:
DOCUMENTATION_CNAME: 'post.docs.pyansys.com'
jobs:
+ get_latest_tag:
+ name: "Get latest release version tag"
+ runs-on: ubuntu-latest
+ outputs:
+ version: ${{ steps.step1.outputs.version }}
+ steps:
+ - id: step1
+ name: "Get version tag"
+ shell: bash
+ run: |
+ get_latest_release() {
+ curl --silent "https://api.github.com/repos/$1/releases/latest" |
+ grep '"tag_name":' |
+ sed -E 's/.*"([^"]+)".*/\1/'
+ }
+ version=$(get_latest_release "ansys/pydpf-post")
+ echo $version
+ echo "version=$version" >> "$GITHUB_OUTPUT"
+
Publish_to_PyPI:
- name: Publish to PyPI
+ name: "Publish Release to PyPI"
runs-on: ubuntu-latest
- if: startsWith(github.ref, 'refs/tags/v')
+ needs: get_latest_tag
steps:
- - uses: actions/checkout@v4
+ - name: "Download Release Assets"
+ uses: robinraju/release-downloader@v1.11
+ with:
+ tag: ${{ github.event.inputs.release_tag || needs.get_latest_tag.outputs.version }}
+ fileName: "*.whl"
+ tarBall: false
+ zipBall: false
+ out-file-path: "assets"
+ extract: false
+ token: ${{ secrets.GITHUB_TOKEN }}
- name: "Upload to Public PyPI"
run: |
pip install twine
- pip install build
- python -m build
- twine upload --skip-existing dist/*
+ twine upload --skip-existing assets/*
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
@@ -29,16 +62,15 @@ jobs:
upload_docs_release:
name: "Upload release documentation"
- if: startsWith(github.ref, 'refs/tags/v')
+ needs: [Publish_to_PyPI, get_latest_tag]
runs-on: ubuntu-latest
- needs: [Publish_to_PyPI]
steps:
-
- name: "Download Release Asset - HTML"
- uses: dsaltares/fetch-gh-release-asset@1.1.1
+ uses: dsaltares/fetch-gh-release-asset@1.1.2
with:
file: HTML-doc-ansys-dpf-post.zip
token: ${{ secrets.GITHUB_TOKEN }}
+ version: ${{ github.event.inputs.release_tag && format('tags/{0}', github.event.inputs.release_tag) || format('tags/{0}', needs.get_latest_tag.outputs.version) }}
- name: "List downloaded assets"
shell: bash
@@ -52,15 +84,18 @@ jobs:
path: HTML-doc-ansys-dpf-post.zip
- name: "Deploy the stable documentation"
- uses: ansys/actions/doc-deploy-stable@v5
+ uses: ansys/actions/doc-deploy-stable@v8
with:
cname: ${{ env.DOCUMENTATION_CNAME }}
- token: ${{ secrets.GITHUB_TOKEN }}
+ token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }}
+ bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }}
+ bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }}
doc-artifact-name: HTML-doc-ansys-dpf-post.zip
decompress-artifact: true
update_ansys_lab_examples:
uses: ./.github/workflows/ansys_lab.yml
+ needs: get_latest_tag
with:
- version: latest
+ version: ${{ github.event.inputs.release_tag || needs.get_latest_tag.outputs.version }}
secrets: inherit
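
The get_latest_tag job above resolves the latest release tag with curl, grep, and sed. For reference, a minimal Python sketch of the same lookup, assuming anonymous access to the documented /releases/latest endpoint and using only the standard library:

    import json
    import urllib.request

    def get_latest_release(repo: str) -> str:
        """Return the tag name of the latest published GitHub release."""
        url = f"https://api.github.com/repos/{repo}/releases/latest"
        with urllib.request.urlopen(url) as response:
            return json.load(response)["tag_name"]

    print(get_latest_release("ansys/pydpf-post"))  # e.g. "v0.9.2"
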
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 44c585428..fd5adf279 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,10 +24,12 @@ repos:
- id: flake8
- repo: https://github.com/codespell-project/codespell
- rev: v2.2.2
+ rev: v2.2.6
hooks:
- id: codespell
args: [
+ "--ignore-words",
+ "doc/styles/config/vocabularies/ANSYS/accept.txt",
"doc src",
"*.py *.rst *.md",
]
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 000000000..aaee1274c
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,12 @@
+# This is the list of PyDPF-Post's significant contributors.
+#
+# This file does not necessarily list everyone who has contributed code.
+#
+# For contributions made under a Corporate CLA, the organization is
+# added to this file.
+#
+# If you have contributed to the repository and want to be added to this file,
+# submit a request.
+#
+#
+ANSYS, Inc.
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
new file mode 100644
index 000000000..8a4d87fd3
--- /dev/null
+++ b/CONTRIBUTORS.md
@@ -0,0 +1,27 @@
+# Contributors
+
+## Project Lead
+
+* [Paul Profizi](https://github.com/PProfizi)
+
+## Individual Contributors
+
+* [Léa Paradis](https://github.com/anslpa)
+* [Camille Bellot](https://github.com/cbellot000)
+* [Roberto Pastor Muela](https://github.com/RobPasMue)
+* [Alex Kaszynski](https://github.com/akaszynski)
+* [Guillem Barroso](https://github.com/GuillemBarroso)
+* [Germán Martínez Ayuso](https://github.com/germa89)
+* [Kathy Pippert](https://github.com/PipKat)
+* [Michael Nale](https://github.com/MichaelNale)
+* [Rafael Canton](https://github.com/rafacanton)
+* [Jess Leonatti](https://github.com/jleonatti)
+* [Jorge Martínez](https://github.com/jorgepiloto)
+* [Jenna Paikowsky](https://github.com/JennaPaikowsky)
+* [Jan Von Rickenbach](https://github.com/janvonrickenbach)
+* [Federico Negri](https://github.com/FedericoNegri)
+* [Revathy Venugopal](https://github.com/Revathyvenugopal162)
+* [Maxime Rey](https://github.com/MaxJPRey)
+* [Antoine Karcher](https://github.com/ansys-akarcher)
+* [Mohamed Koubaa](https://github.com/koubaa)
+* [Ramdane Lagha](https://github.com/rlagha)
diff --git a/LICENSE b/LICENSE
index 9dc06b8ac..6df70bc4b 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
The MIT License
-Copyright (c) 2023 ANSYS
+Copyright (c) 2024 ANSYS
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 23aec0555..5043de2da 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ The latest version of DPF supports Ansys solver results files for:
- Mechanical APDL (`.rst`, `.mode`, `.rfrq`, `.rdsp`)
- LS-DYNA (`.d3plot`, `.binout`)
- Fluent (`.cas/dat.h5`, `.flprj`)
- - CFX (`.cad/dat.cff`, `.flprj`)
+ - CFX (`.cas/dat.cff`, `.flprj`, `.res`)
For more information on file support, see the [main page](https://dpf.docs.pyansys.com/version/stable/index.html)
in the PyDPF-Core documentation.
diff --git a/doc/.vale.ini b/doc/.vale.ini
index ed2cfbee2..139138d0a 100644
--- a/doc/.vale.ini
+++ b/doc/.vale.ini
@@ -26,6 +26,7 @@ Vocab = ANSYS
# Apply the following styles
BasedOnStyles = Vale, Google
+Vale.Terms = NO
# Removing Google-specific rule - Not applicable under some circumstances
Google.WordList = NO
diff --git a/doc/source/conf.py b/doc/source/conf.py
index f985434ff..ce1a873af 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -226,6 +226,11 @@ def reset_servers(gallery_conf, fname, when):
"version_match": get_version_match(__version__),
},
"navbar_end": ["version-switcher", "theme-switcher", "navbar-icon-links"],
+ "static_search": {
+ "threshold": 0.5,
+ "min_chars_for_search": 2,
+ "ignoreLocation": True,
+ },
}
# Add any paths that contain custom static files (such as style sheets) here,
diff --git a/doc/source/getting_started/compatibility.rst b/doc/source/getting_started/compatibility.rst
index 466ac4313..8d60d63af 100644
--- a/doc/source/getting_started/compatibility.rst
+++ b/doc/source/getting_started/compatibility.rst
@@ -22,7 +22,19 @@ libraries.
* - DPF server version
- ansys.dpf.core python module version
- ansys.dpf.post python module version
+ * - 8.1 (DPF Server 2024.2.pre1)
+ - 0.12.0 or later
+ - 0.8.0 or later
+ * - 8.0 (DPF Server 2024.2.pre0)
+ - 0.11.0 or later
+ - 0.7.0 or later
+ * - 7.1 (Ansys 2024 R1)
+ - 0.10.1 or later
+ - 0.6.0 or later
* - 7.0 (DPF Server 2024.1.pre0)
+ - 0.10.0 or later
+ - 0.6.0 or later
+ * - 6.2 (Ansys 2023 R2)
- 0.9.0 or later
- 0.5.0 or later
* - 6.1 (DPF Server 2023.2.pre1)
@@ -38,11 +50,13 @@ libraries.
- 0.5.0 or later
- 0.3.0 or later
* - 3.0 (Ansys 2022 R1)
- - 0.4.0 or later
- - 0.1.0 or later
+ - 0.4.0 to 0.9.0
+ - 0.1.0 to 0.5.0
* - 2.0 (Ansys 2021 R2)
- - 0.3.0 or later
- - 0.1.0 or later
+ - 0.3.0 to 0.9.0**
+ - 0.1.0 to 0.5.0
* - 1.0 (Ansys 2021 R1)
- 0.2.*
- - 0.1.0 or later
+ - 0.1.0
+
+(** Compatibility of DPF 2.0 with ansys-dpf-core 0.5.0 to 0.9.0 is assumed but not certified.)
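
To check an existing environment against this table, the installed module versions can be read with the standard library (a minimal sketch; the names below are the published PyPI distribution names):

    from importlib.metadata import version

    # Compare the installed versions against the compatibility table above.
    for package in ("ansys-dpf-core", "ansys-dpf-post"):
        print(package, version(package))
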
diff --git a/doc/source/getting_started/index.rst b/doc/source/getting_started/index.rst
index 2c166e145..7b1ed5a2e 100644
--- a/doc/source/getting_started/index.rst
+++ b/doc/source/getting_started/index.rst
@@ -10,7 +10,7 @@ or later or of the standalone DPF Server.
For more information on getting a licensed copy of Ansys, visit the
`Ansys website `_.
For more information on getting the standalone DPF Server, visit the
-`PyDPF-Core documentation `_.
+`PyDPF-Core documentation `_.
.. toctree::
diff --git a/doc/source/index.rst b/doc/source/index.rst
index c22284d9b..5fed2e755 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -17,7 +17,7 @@ The latest version of DPF supports Ansys solver results files for:
- Mechanical APDL (`.rst`, `.mode`, `.rfrq`, `.rdsp`)
- LS-DYNA (`.d3plot`, `.binout`)
- Fluent (`.cas/dat.h5`, `.flprj`)
-- CFX (`.cad/dat.cff`, `.flprj`)
+- CFX (`.cas/dat.cff`, `.flprj`, `.res`)
For more information on file support, see the `main page `_
in the PyDPF-Core documentation.
diff --git a/doc/styles/.gitignore b/doc/styles/.gitignore
index 080f12aa4..e557e5689 100644
--- a/doc/styles/.gitignore
+++ b/doc/styles/.gitignore
@@ -1,4 +1,3 @@
*
-!Vocab
-!Vocab/**
+!config/vocabularies/**
!.gitignore
\ No newline at end of file
diff --git a/doc/styles/Vocab/ANSYS/accept.txt b/doc/styles/config/vocabularies/ANSYS/accept.txt
similarity index 95%
rename from doc/styles/Vocab/ANSYS/accept.txt
rename to doc/styles/config/vocabularies/ANSYS/accept.txt
index 386d8b250..818bbefc5 100644
--- a/doc/styles/Vocab/ANSYS/accept.txt
+++ b/doc/styles/config/vocabularies/ANSYS/accept.txt
@@ -4,6 +4,7 @@
ANSYS
API
HDF5
+mecanic
postprocess
postprocessing
(PyDPF\s|PyDPF-Core|PyDPF-Post)
diff --git a/doc/styles/Vocab/ANSYS/reject.txt b/doc/styles/config/vocabularies/ANSYS/reject.txt
similarity index 100%
rename from doc/styles/Vocab/ANSYS/reject.txt
rename to doc/styles/config/vocabularies/ANSYS/reject.txt
diff --git a/pyproject.toml b/pyproject.toml
index 3ea1a3bf0..01bc9cfdb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ build-backend = "flit_core.buildapi"
[project]
# Check https://flit.readthedocs.io/en/latest/pyproject_toml.html for all available sections
name = "ansys-dpf-post"
-version = "0.6.1.dev0"
+version = "0.9.3.dev0"
description = "PyDPF-Post Python library."
readme = "README.md"
requires-python = ">=3.9,<4.0"
diff --git a/requirements/requirements_build.txt b/requirements/requirements_build.txt
index 3e1bc0e22..ead39c472 100644
--- a/requirements/requirements_build.txt
+++ b/requirements/requirements_build.txt
@@ -1 +1 @@
-wheel==0.42.0
\ No newline at end of file
+wheel==0.45.0
\ No newline at end of file
diff --git a/requirements/requirements_docs.txt b/requirements/requirements_docs.txt
index faf59cdcd..9e7393e8e 100644
--- a/requirements/requirements_docs.txt
+++ b/requirements/requirements_docs.txt
@@ -1,16 +1,14 @@
-pypandoc==1.12
-imageio==2.33.1
-numpydoc==1.6.0
-imageio-ffmpeg==0.4.7
-Sphinx<7.2.0; python_version == '3.8'
-Sphinx==7.2.6; python_version >= '3.9'
-sphinx-autobuild==2021.3.14
+pypandoc==1.14
+imageio==2.36.0
+numpydoc==1.8.0
+imageio-ffmpeg==0.5.1
+Sphinx==8.1.3
sphinxcontrib-napoleon==0.7
-sphinxcontrib-websupport==1.2.7
-pytest-sphinx==0.5.0
-sphinx-notfound-page==1.0.0
+sphinxcontrib-websupport==2.0.0
+pytest-sphinx==0.6.3
+sphinx-notfound-page==1.0.4
sphinx-copybutton==0.5.0
-sphinx-gallery==0.15.0
-ansys_sphinx_theme==0.13.1
-sphinx-autodoc-typehints==1.25.2
-pyvista==0.43.2
+sphinx-gallery==0.18.0
+ansys_sphinx_theme==1.2.1
+sphinx-autodoc-typehints==2.5.0
+pyvista==0.44.1
diff --git a/requirements/requirements_test.txt b/requirements/requirements_test.txt
index 64916c7dc..9e00434f8 100644
--- a/requirements/requirements_test.txt
+++ b/requirements/requirements_test.txt
@@ -1,5 +1,5 @@
-coverage==7.4.0
-pytest-cov==4.0.0
-pytest-rerunfailures==13.0
-pytest==7.4.2
-pyvista==0.43.2
+coverage==7.6.7
+pytest-cov==6.0.0
+pytest-rerunfailures==14.0
+pytest==8.3.3
+pyvista==0.44.1
diff --git a/src/ansys/dpf/post/dataframe.py b/src/ansys/dpf/post/dataframe.py
index 24e8df4f8..bdc0d13f0 100644
--- a/src/ansys/dpf/post/dataframe.py
+++ b/src/ansys/dpf/post/dataframe.py
@@ -273,12 +273,14 @@ def select(self, **kwargs) -> DataFrame:
# # Treat selection on components
if ref_labels.components in axis_kwargs.keys():
- from ansys.dpf.post.simulation import component_label_to_index
+ from ansys.dpf.post.result_workflows._component_helper import (
+ _component_label_to_index,
+ )
comp_to_extract = axis_kwargs[ref_labels.components]
if not isinstance(comp_to_extract, list):
comp_to_extract = [comp_to_extract]
- component_indexes = [component_label_to_index[c] for c in comp_to_extract]
+ component_indexes = [_component_label_to_index[c] for c in comp_to_extract]
selector_fc = dpf.operators.logic.component_selector_fc(
fields_container=input_fc,
component_number=component_indexes,
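
The import swap above moves the component mapping into result_workflows._component_helper. As a hypothetical sketch of what DataFrame.select does with it, assuming the usual X/Y/Z ordering for vector components (the real dictionary lives in the helper module and also covers matrix components):

    # Hypothetical stand-in for ansys.dpf.post.result_workflows._component_helper
    _component_label_to_index = {"X": 0, "Y": 1, "Z": 2}

    comp_to_extract = ["X", "Z"]  # user-facing labels passed to select()
    component_indexes = [_component_label_to_index[c] for c in comp_to_extract]
    print(component_indexes)  # [0, 2] -> fed to component_selector_fc
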
diff --git a/src/ansys/dpf/post/fluid_simulation.py b/src/ansys/dpf/post/fluid_simulation.py
index a3466cc96..c1cbf73e3 100644
--- a/src/ansys/dpf/post/fluid_simulation.py
+++ b/src/ansys/dpf/post/fluid_simulation.py
@@ -7,6 +7,7 @@
from os import PathLike
from typing import List, Union
+from ansys.dpf.core import Workflow
from ansys.dpf.core.server_types import BaseServer
from ansys.dpf import core as dpf
@@ -14,8 +15,17 @@
from ansys.dpf.post.dataframe import DataFrame
from ansys.dpf.post.mesh_info import FluidMeshInfo
from ansys.dpf.post.phase import PhasesDict
-from ansys.dpf.post.selection import Selection
-from ansys.dpf.post.simulation import ResultCategory, Simulation
+from ansys.dpf.post.result_workflows._component_helper import (
+ ResultCategory,
+ _create_components,
+)
+from ansys.dpf.post.result_workflows._connect_workflow_inputs import (
+ _connect_workflow_inputs,
+)
+from ansys.dpf.post.result_workflows._sub_workflows import _create_norm_workflow
+from ansys.dpf.post.result_workflows._utils import AveragingConfig, _append_workflows
+from ansys.dpf.post.selection import Selection, _WfNames
+from ansys.dpf.post.simulation import Simulation
from ansys.dpf.post.species import SpeciesDict
@@ -223,16 +233,40 @@ def _get_result_workflow(
qualifiers: Union[dict, None] = None,
) -> (dpf.Workflow, Union[str, list[str], None], str):
"""Generate (without evaluating) the Workflow to extract results."""
- comp, to_extract, columns = self._create_components(
- base_name, category, components
+ comp, to_extract, columns = _create_components(base_name, category, components)
+
+ initial_result_workflow = Workflow(server=self._model._server)
+
+ initial_result_op = self._model.operator(name=base_name)
+ initial_result_workflow.set_input_name(_WfNames.mesh, initial_result_op, 7)
+ initial_result_workflow.set_input_name(_WfNames.location, initial_result_op, 9)
+
+ initial_result_workflow.add_operator(initial_result_op)
+ initial_result_workflow.set_output_name(
+ _WfNames.output_data, initial_result_op, 0
+ )
+ initial_result_workflow.set_input_name(
+ "time_scoping", initial_result_op.inputs.time_scoping
+ )
+ initial_result_workflow.set_input_name(
+ "mesh_scoping", initial_result_op.inputs.mesh_scoping
)
- # Initialize a workflow
- wf, result_op = self._build_result_workflow(
- name=base_name,
- location=location,
+ _connect_workflow_inputs(
+ initial_result_workflow=initial_result_workflow,
+ split_by_body_workflow=None,
+ rescoping_workflow=None,
force_elemental_nodal=False,
+ location=location,
+ selection=selection,
+ expand_cyclic=False,
+ phase_angle_cyclic=None,
+ mesh=self.mesh._meshed_region,
+ streams_provider=self._model.metadata.streams_provider,
+ data_sources=self._model.metadata.data_sources,
+ averaging_config=AveragingConfig(),
)
+
query_regions_meshes = False
lists = []
lists_labels = []
@@ -268,7 +302,7 @@ def _get_result_workflow(
label_space = {}
for j, label in enumerate(lists_labels):
label_space[label] = c[j]
- result_op.connect(1000 + i, label_space)
+ initial_result_op.connect(1000 + i, label_space)
# Its output is selected as future workflow output for now
# print(result_op)
@@ -277,44 +311,35 @@ def _get_result_workflow(
# A MeshesProvider is required to give meshes as input of the source operator
meshes_provider_op = self._model.operator("meshes_provider")
meshes_provider_op.connect(25, query_regions_meshes)
- result_op.connect(7, meshes_provider_op.outputs.meshes)
- wf.add_operator(meshes_provider_op)
+ initial_result_workflow.connect(
+ _WfNames.mesh, meshes_provider_op.outputs.meshes
+ )
+
+ initial_result_workflow.add_operator(meshes_provider_op)
else:
# Results have been queried on the whole mesh,
# A MeshProvider is required to give the mesh as input of the source operator
mesh_provider_op = self._model.operator("mesh_provider")
- result_op.connect(7, mesh_provider_op.outputs.mesh)
- wf.add_operator(mesh_provider_op)
-
- out = result_op.outputs.fields_container
- # Its inputs are selected as workflow inputs for merging with selection workflows
- wf.set_input_name("time_scoping", result_op.inputs.time_scoping)
- wf.set_input_name("mesh_scoping", result_op.inputs.mesh_scoping)
-
- wf.connect_with(
- selection.time_freq_selection._selection,
- output_input_names=("scoping", "time_scoping"),
- )
- wf.connect_with(
- selection.spatial_selection._selection,
- output_input_names=("scoping", "mesh_scoping"),
- )
-
- # Connect data_sources and streams_container inputs of selection if necessary
- if "streams" in wf.input_names:
- wf.connect("streams", self._model.metadata.streams_provider)
- if "data_sources" in wf.input_names:
- wf.connect("data_sources", self._model.metadata.data_sources)
+ initial_result_workflow.connect(
+ _WfNames.mesh, mesh_provider_op.outputs.mesh
+ )
+ initial_result_workflow.add_operator(mesh_provider_op)
- # Add an optional norm operation if requested
+ output_wf = initial_result_workflow
if norm:
- wf, out, comp, base_name = self._append_norm(wf, out, base_name)
+ norm_workflow, base_name = _create_norm_workflow(
+ create_operator_callable=self._model.operator,
+ base_name=base_name,
+ server=self._model._server,
+ )
+
+ output_wf = _append_workflows(
+ [norm_workflow], current_output_workflow=initial_result_workflow
+ )
- # Set the workflow output
- wf.set_output_name("out", out)
- wf.progress_bar = False
+ output_wf.progress_bar = False
- return wf, comp, base_name
+ return output_wf, comp, base_name
def _get_result(
self,
@@ -510,14 +535,14 @@ def _get_result(
)
# Evaluate the workflow
- fc = wf.get_output("out", dpf.types.fields_container)
+ fc = wf.get_output(_WfNames.output_data, dpf.types.fields_container)
# print(fc)
if location is None and len(fc) > 0:
location = fc[0].location
if location == locations.elemental:
location = "cells"
- _, _, columns = self._create_components(base_name, category, components)
+ _, _, columns = _create_components(base_name, category, components)
return self._create_dataframe(
fc, location, columns, comp, base_name.split("::")[-1], None
)
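
This refactor replaces in-place operator wiring with named sub-workflows chained through _append_workflows. Below is a dependency-free sketch of the chaining idea; FakeWorkflow and the stage names are illustrative stand-ins, while the real helper connects dpf.Workflow objects through named pins such as _WfNames.output_data:

    class FakeWorkflow:
        """Stand-in for dpf.Workflow that records its upstream stages."""

        def __init__(self, name):
            self.stages = [name]

        def connect_with(self, upstream):
            # The real code maps the upstream "output_data" pin onto this
            # workflow's input pin; here we only record the ordering.
            self.stages = upstream.stages + self.stages

    def append_workflows(workflows, current_output_workflow):
        """Chain each non-None workflow downstream of the current output."""
        out = current_output_workflow
        for wf in workflows:
            if wf is None:  # optional steps (norm, rescoping, ...) may be absent
                continue
            wf.connect_with(out)
            out = wf
        return out

    final = append_workflows(
        [None, FakeWorkflow("norm")], FakeWorkflow("initial_result")
    )
    print(final.stages)  # ['initial_result', 'norm']
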
diff --git a/src/ansys/dpf/post/harmonic_mechanical_simulation.py b/src/ansys/dpf/post/harmonic_mechanical_simulation.py
index c0e028db3..8ce06d095 100644
--- a/src/ansys/dpf/post/harmonic_mechanical_simulation.py
+++ b/src/ansys/dpf/post/harmonic_mechanical_simulation.py
@@ -4,7 +4,7 @@
----------------------------
"""
-from typing import List, Tuple, Union
+from typing import List, Optional, Tuple, Union
import warnings
from ansys.dpf import core as dpf
@@ -18,8 +18,25 @@
ResultsIndex,
SetIndex,
)
+from ansys.dpf.post.result_workflows._build_workflow import (
+ _create_result_workflow_inputs,
+ _create_result_workflows,
+)
+from ansys.dpf.post.result_workflows._component_helper import (
+ ResultCategory,
+ _create_components,
+)
+from ansys.dpf.post.result_workflows._connect_workflow_inputs import (
+ _connect_averaging_eqv_and_principal_workflows,
+ _connect_workflow_inputs,
+)
+from ansys.dpf.post.result_workflows._utils import (
+ AveragingConfig,
+ _append_workflows,
+ _Rescoping,
+)
from ansys.dpf.post.selection import Selection, _WfNames
-from ansys.dpf.post.simulation import MechanicalSimulation, ResultCategory
+from ansys.dpf.post.simulation import MechanicalSimulation
class HarmonicMechanicalSimulation(MechanicalSimulation):
@@ -37,173 +54,62 @@ def _get_result_workflow(
selection: Union[Selection, None] = None,
expand_cyclic: Union[bool, List[Union[int, List[int]]]] = True,
phase_angle_cyclic: Union[float, None] = None,
+ averaging_config: AveragingConfig = AveragingConfig(),
+ rescoping: Optional[_Rescoping] = None,
) -> (dpf.Workflow, Union[str, list[str], None], str):
"""Generate (without evaluating) the Workflow to extract results."""
- comp, to_extract, _ = self._create_components(base_name, category, components)
-
- force_elemental_nodal = self._requires_manual_averaging(
+ result_workflow_inputs = _create_result_workflow_inputs(
base_name=base_name,
- location=location,
category=category,
+ components=components,
+ norm=norm,
+ location=location,
selection=selection,
+ create_operator_callable=self._model.operator,
+ amplitude=amplitude,
+ sweeping_phase=sweeping_phase,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
-
- # Instantiate the main result operator
- wf, result_op = self._build_result_workflow(
- name=base_name,
+ result_workflows = _create_result_workflows(
+ server=self._model._server,
+ create_operator_callable=self._model.operator,
+ create_workflow_inputs=result_workflow_inputs,
+ )
+ _connect_workflow_inputs(
+ initial_result_workflow=result_workflows.initial_result_workflow,
+ split_by_body_workflow=result_workflows.split_by_bodies_workflow,
+ rescoping_workflow=result_workflows.rescoping_workflow,
+ selection=selection,
+ data_sources=self._model.metadata.data_sources,
+ streams_provider=self._model.metadata.streams_provider,
+ expand_cyclic=expand_cyclic,
+ phase_angle_cyclic=phase_angle_cyclic,
+ mesh=self.mesh._meshed_region,
location=location,
- force_elemental_nodal=force_elemental_nodal,
+ force_elemental_nodal=result_workflows.force_elemental_nodal,
+ averaging_config=averaging_config,
)
- # Its output is selected as future workflow output for now
- out = result_op.outputs.fields_container
- # Its inputs are selected as workflow inputs for merging with selection workflows
- wf.set_input_name("time_scoping", result_op.inputs.time_scoping)
- wf.set_input_name("mesh_scoping", result_op.inputs.mesh_scoping)
-
- wf.connect_with(
- selection.time_freq_selection._selection,
- output_input_names=("scoping", "time_scoping"),
- )
- if selection.requires_mesh:
- # wf.set_input_name(_WfNames.mesh, result_op.inputs.mesh)
- mesh_wf = dpf.Workflow(server=self._model._server)
- mesh_wf.add_operator(self._model.metadata.mesh_provider)
- mesh_wf.set_output_name(
- _WfNames.initial_mesh, self._model.metadata.mesh_provider
- )
- selection.spatial_selection._selection.connect_with(
- mesh_wf,
- output_input_names={_WfNames.initial_mesh: _WfNames.initial_mesh},
- )
+ output_wf = _connect_averaging_eqv_and_principal_workflows(result_workflows)
- wf.connect_with(
- selection.spatial_selection._selection,
- output_input_names={
- "scoping": "mesh_scoping",
- },
+ output_wf = _append_workflows(
+ [
+ result_workflows.component_extraction_workflow,
+ result_workflows.sweeping_phase_workflow,
+ result_workflows.norm_workflow,
+ result_workflows.rescoping_workflow,
+ ],
+ output_wf,
)
- # Treat cyclic cases
- wf = self._treat_cyclic(expand_cyclic, phase_angle_cyclic, wf)
-
- # Connect data_sources and streams_container inputs of selection if necessary
- if "streams" in wf.input_names:
- wf.connect("streams", self._model.metadata.streams_provider)
- if "data_sources" in wf.input_names:
- wf.connect("data_sources", self._model.metadata.data_sources)
+ output_wf.progress_bar = False
- average_op = None
- if force_elemental_nodal:
- average_op = self._create_averaging_operator(
- location=location, selection=selection
- )
-
- # Add a step to compute principal invariants if result is principal
- if category == ResultCategory.principal:
- # Instantiate the required operator
- principal_op = self._model.operator(name="invariants_fc")
- # Corresponds to scripting name principal_invariants
- if average_op is not None:
- average_op[0].connect(0, out)
- principal_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- average_op = None
- else:
- principal_op.connect(0, out)
- wf.add_operator(operator=principal_op)
- # Set as future output of the workflow
- if len(to_extract) == 1:
- out = getattr(principal_op.outputs, f"fields_eig_{to_extract[0]+1}")
- else:
- raise NotImplementedError("Cannot combine principal results yet.")
- # We need to define the behavior for storing different results in a DataFrame
-
- # Add a step to compute equivalent if result is equivalent
- elif category == ResultCategory.equivalent:
- equivalent_op = self._model.operator(name="eqv_fc")
- wf.add_operator(operator=equivalent_op)
- # If a strain result, change the location now
- if (
- average_op is not None
- and category == ResultCategory.equivalent
- and base_name[0] == "E"
- ):
- equivalent_op.connect(0, out)
- average_op[0].connect(0, equivalent_op)
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = average_op[1].outputs.fields_container
- elif average_op is not None:
- average_op[0].connect(0, out)
- equivalent_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = equivalent_op.outputs.fields_container
- else:
- equivalent_op.connect(0, out)
- out = equivalent_op.outputs.fields_container
- average_op = None
- base_name += "_VM"
-
- if average_op is not None:
- average_op[0].connect(0, out)
- wf.add_operators(list(average_op))
- out = average_op[1].outputs.fields_container
-
- # Add an optional component selection step if result is vector, or matrix
- if (
- category
- in [
- ResultCategory.vector,
- ResultCategory.matrix,
- ]
- ) and (to_extract is not None):
- # Instantiate a component selector operator
- extract_op = self._model.operator(name="component_selector_fc")
- # Feed it the current workflow output
- extract_op.connect(0, out)
- # Feed it the requested components
- extract_op.connect(1, to_extract)
- wf.add_operator(operator=extract_op)
- # Set as future output of the workflow
- out = extract_op.outputs.fields_container
- if len(to_extract) == 1:
- base_name += f"_{comp[0]}"
- comp = None
-
- # Add an optional sweeping phase or amplitude operation if requested
- # (must be after comp_selector for U)
- # (must be before norm operation for U)
- if sweeping_phase is not None and not amplitude:
- if isinstance(sweeping_phase, int):
- sweeping_phase = float(sweeping_phase)
- if not isinstance(sweeping_phase, float):
- raise ValueError("Argument sweeping_phase must be a float.")
- sweeping_op = self._model.operator(name="sweeping_phase_fc")
- sweeping_op.connect(0, out)
- sweeping_op.connect(2, sweeping_phase)
- sweeping_op.connect(3, "degree")
- sweeping_op.connect(4, False)
- wf.add_operator(operator=sweeping_op)
- out = sweeping_op.outputs.fields_container
- elif amplitude:
- amplitude_op = self._model.operator(name="amplitude_fc")
- amplitude_op.connect(0, out)
- wf.add_operator(operator=amplitude_op)
- out = amplitude_op.outputs.fields_container
-
- # Add an optional norm operation if requested
- # (must be after sweeping_phase for U)
- if norm:
- wf, out, comp, base_name = self._append_norm(wf, out, base_name)
-
- # Set the workflow output
- wf.set_output_name("out", out)
- wf.progress_bar = False
-
- return wf, comp, base_name
+ return (
+ output_wf,
+ result_workflows.components,
+ result_workflows.base_name,
+ )
def _get_result(
self,
@@ -228,6 +134,7 @@ def _get_result(
phase_angle_cyclic: Union[float, None] = None,
external_layer: Union[bool, List[int]] = False,
skin: Union[bool, List[int]] = False,
+ averaging_config: AveragingConfig = AveragingConfig(),
) -> DataFrame:
"""Extract results from the simulation.
@@ -300,6 +207,10 @@ def _get_result(
is computed over list of elements (not supported for cyclic symmetry). Getting the
skin on more than one result (several time freq sets, split data...) is only
supported starting with Ansys 2023R2.
+ averaging_config:
+            By default, averaging happens across all bodies. The averaging
+            config can request that averaging happens per body, and it defines
+            the properties that are used to identify a body.
Returns
-------
@@ -332,7 +243,7 @@ def _get_result(
"and node_ids are mutually exclusive"
)
- selection = self._build_selection(
+ selection, rescoping = self._build_selection(
base_name=base_name,
category=category,
selection=selection,
@@ -346,6 +257,7 @@ def _get_result(
location=location,
external_layer=external_layer,
skin=skin,
+ average_per_body=averaging_config.average_per_body,
)
wf, comp, base_name = self._get_result_workflow(
@@ -359,14 +271,16 @@ def _get_result(
selection=selection,
expand_cyclic=expand_cyclic,
phase_angle_cyclic=phase_angle_cyclic,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
# Evaluate the workflow
- fc = wf.get_output("out", dpf.types.fields_container)
+ fc = wf.get_output(_WfNames.output_data, dpf.types.fields_container)
disp_wf = self._generate_disp_workflow(fc, selection)
- _, _, columns = self._create_components(base_name, category, components)
+ _, _, columns = _create_components(base_name, category, components)
# Test for empty results
if (len(fc) == 0) or all([len(f) == 0 for f in fc]):
diff --git a/src/ansys/dpf/post/mesh.py b/src/ansys/dpf/post/mesh.py
index 87965a18f..17bf2586f 100644
--- a/src/ansys/dpf/post/mesh.py
+++ b/src/ansys/dpf/post/mesh.py
@@ -9,6 +9,7 @@
from typing import List
import ansys.dpf.core as dpf
+from ansys.dpf.core.faces import Face
from ansys.dpf.core.nodes import Node
from ansys.dpf.core.property_fields_container import (
_MockPropertyFieldsContainer as PropertyFieldsContainer,
@@ -21,6 +22,7 @@
from ansys.dpf.post.faces import FaceListByIndex
from ansys.dpf.post.named_selection import NamedSelections
from ansys.dpf.post.nodes import NodeListByIndex
+from ansys.dpf.post.result_workflows._component_helper import _vector_component_names
class Mesh:
@@ -501,8 +503,6 @@ def coordinates(self) -> post.DataFrame:
Z 0.0000e+00
... ... ...
"""
- from ansys.dpf.post.simulation import vector_component_names
-
label = "coord"
fields_container = dpf.FieldsContainer()
fields_container.add_field(
@@ -517,7 +517,7 @@ def coordinates(self) -> post.DataFrame:
scoping=self._core_object.nodes.scoping,
fc=fields_container,
),
- index.CompIndex(values=vector_component_names),
+ index.CompIndex(values=_vector_component_names),
]
),
columns=index.MultiIndex(
diff --git a/src/ansys/dpf/post/mesh_info.py b/src/ansys/dpf/post/mesh_info.py
index 8ce9e90e0..e02b3acc4 100644
--- a/src/ansys/dpf/post/mesh_info.py
+++ b/src/ansys/dpf/post/mesh_info.py
@@ -139,7 +139,7 @@ def face_zones(self) -> dict:
string_field = self._core_object.get_property("face_zone_names")
for zone_id in string_field.scoping.ids:
zone_name = string_field.get_entity_data_by_id(zone_id)[0]
- zones[zone_id] = zone_name
+ zones[zone_id.item()] = zone_name
self._face_zones = zones
return self._face_zones
@@ -164,7 +164,7 @@ def cell_zones(self) -> dict:
string_field = self._core_object.body_names
for zone_id in string_field.scoping.ids:
zone_name = string_field.get_entity_data_by_id(zone_id)[0]
- zones[zone_id] = zone_name
+ zones[zone_id.item()] = zone_name
self._cell_zones = zones
return self._cell_zones
@@ -192,6 +192,6 @@ def cell_zones_to_face_zones(self) -> dict:
face_zone_ids = property_field.get_entity_data_by_id(
cell_zone_id
).tolist()
- result[cell_zone_id] = face_zone_ids
+ result[cell_zone_id.item()] = face_zone_ids
self._cell_zones_to_face_zones = result
return self._cell_zones_to_face_zones
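
The .item() calls above convert NumPy scalar IDs into native Python integers, so the zone dictionaries get plain int keys, which serialize and compare more cleanly. A minimal demonstration:

    import numpy as np

    zone_id = np.int32(7)            # scoping IDs come back as NumPy scalars
    print(type(zone_id))             # <class 'numpy.int32'>
    print(type(zone_id.item()))      # <class 'int'>
    zones = {zone_id.item(): "inlet"}  # plain-int keys, JSON-serializable
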
diff --git a/src/ansys/dpf/post/misc.py b/src/ansys/dpf/post/misc.py
index b63fe6a85..795031d64 100644
--- a/src/ansys/dpf/post/misc.py
+++ b/src/ansys/dpf/post/misc.py
@@ -65,3 +65,9 @@ def __init__(self, additional=None, ncol=3, text_width=80, sort=False, gpu=True)
sort=sort,
extra_meta=extra_meta,
)
+
+
+def _connect_any(operator_input, input_value):
+ # Workaround to connect any inputs: see
+ # https://github.com/ansys/pydpf-core/issues/1670
+ operator_input._operator().connect(operator_input._pin, input_value)
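
A hedged usage sketch of this workaround, assuming a running DPF server; the forward utility operator stands in for any operator whose typed connect path fails for a given value:

    from ansys.dpf import core as dpf
    from ansys.dpf.post.misc import _connect_any

    op = dpf.operators.utility.forward()  # placeholder operator
    # Route the value through the owning operator and pin number instead of
    # the typed Input.connect path.
    _connect_any(op.inputs.any, 42.0)
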
diff --git a/src/ansys/dpf/post/modal_mechanical_simulation.py b/src/ansys/dpf/post/modal_mechanical_simulation.py
index 6ac8feefb..e180cabb2 100644
--- a/src/ansys/dpf/post/modal_mechanical_simulation.py
+++ b/src/ansys/dpf/post/modal_mechanical_simulation.py
@@ -4,13 +4,30 @@
-------------------------
"""
-from typing import List, Union
+from typing import List, Optional, Union
from ansys.dpf import core as dpf
from ansys.dpf.post import locations
from ansys.dpf.post.dataframe import DataFrame
+from ansys.dpf.post.result_workflows._build_workflow import (
+ _create_result_workflow_inputs,
+ _create_result_workflows,
+)
+from ansys.dpf.post.result_workflows._component_helper import (
+ ResultCategory,
+ _create_components,
+)
+from ansys.dpf.post.result_workflows._connect_workflow_inputs import (
+ _connect_averaging_eqv_and_principal_workflows,
+ _connect_workflow_inputs,
+)
+from ansys.dpf.post.result_workflows._utils import (
+ AveragingConfig,
+ _append_workflows,
+ _Rescoping,
+)
from ansys.dpf.post.selection import Selection, _WfNames
-from ansys.dpf.post.simulation import MechanicalSimulation, ResultCategory
+from ansys.dpf.post.simulation import MechanicalSimulation
class ModalMechanicalSimulation(MechanicalSimulation):
@@ -26,147 +43,59 @@ def _get_result_workflow(
selection: Union[Selection, None] = None,
expand_cyclic: Union[bool, List[Union[int, List[int]]]] = True,
phase_angle_cyclic: Union[float, None] = None,
+ averaging_config: AveragingConfig = AveragingConfig(),
+ rescoping: Optional[_Rescoping] = None,
) -> (dpf.Workflow, Union[str, list[str], None], str):
"""Generate (without evaluating) the Workflow to extract results."""
- comp, to_extract, _ = self._create_components(base_name, category, components)
-
- force_elemental_nodal = self._requires_manual_averaging(
+ result_workflow_inputs = _create_result_workflow_inputs(
base_name=base_name,
- location=location,
category=category,
+ components=components,
+ norm=norm,
+ location=location,
selection=selection,
+ create_operator_callable=self._model.operator,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
-
- # Instantiate the main result operator
- wf, result_op = self._build_result_workflow(
- name=base_name,
+ result_workflows = _create_result_workflows(
+ server=self._model._server,
+ create_operator_callable=self._model.operator,
+ create_workflow_inputs=result_workflow_inputs,
+ )
+ _connect_workflow_inputs(
+ initial_result_workflow=result_workflows.initial_result_workflow,
+ split_by_body_workflow=result_workflows.split_by_bodies_workflow,
+ rescoping_workflow=result_workflows.rescoping_workflow,
+ selection=selection,
+ data_sources=self._model.metadata.data_sources,
+ streams_provider=self._model.metadata.streams_provider,
+ expand_cyclic=expand_cyclic,
+ phase_angle_cyclic=phase_angle_cyclic,
+ mesh=self.mesh._meshed_region,
location=location,
- force_elemental_nodal=force_elemental_nodal,
+ force_elemental_nodal=result_workflows.force_elemental_nodal,
+ averaging_config=averaging_config,
)
- # Its output is selected as future workflow output for now
- out = result_op.outputs.fields_container
- # Its inputs are selected as workflow inputs for merging with selection workflows
- wf.set_input_name("time_scoping", result_op.inputs.time_scoping)
- wf.set_input_name("mesh_scoping", result_op.inputs.mesh_scoping)
+ output_wf = _connect_averaging_eqv_and_principal_workflows(result_workflows)
- wf.connect_with(
- selection.time_freq_selection._selection,
- output_input_names=("scoping", "time_scoping"),
+ output_wf = _append_workflows(
+ [
+ result_workflows.component_extraction_workflow,
+ result_workflows.norm_workflow,
+ result_workflows.rescoping_workflow,
+ ],
+ output_wf,
)
- if selection.requires_mesh:
- mesh_wf = dpf.Workflow(server=self._model._server)
- mesh_wf.add_operator(self._model.metadata.mesh_provider)
- mesh_wf.set_output_name(
- _WfNames.initial_mesh, self._model.metadata.mesh_provider
- )
- selection.spatial_selection._selection.connect_with(
- mesh_wf,
- output_input_names={_WfNames.initial_mesh: _WfNames.initial_mesh},
- )
- wf.connect_with(
- selection.spatial_selection._selection,
- output_input_names={
- "scoping": "mesh_scoping",
- },
+ output_wf.progress_bar = False
+ return (
+ output_wf,
+ result_workflows.components,
+ result_workflows.base_name,
)
- # Treat cyclic cases
- wf = self._treat_cyclic(expand_cyclic, phase_angle_cyclic, wf)
-
- # Connect data_sources and streams_container inputs of selection if necessary
- if "streams" in wf.input_names:
- wf.connect("streams", self._model.metadata.streams_provider)
- if "data_sources" in wf.input_names:
- wf.connect("data_sources", self._model.metadata.data_sources)
-
- average_op = None
- if force_elemental_nodal:
- average_op = self._create_averaging_operator(
- location=location, selection=selection
- )
-
- # Add a step to compute principal invariants if result is principal
- if category == ResultCategory.principal:
- # Instantiate the required operator
- principal_op = self._model.operator(name="invariants_fc")
- # Corresponds to scripting name principal_invariants
- if average_op is not None:
- average_op[0].connect(0, out)
- principal_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- average_op = None
- else:
- principal_op.connect(0, out)
- wf.add_operator(operator=principal_op)
- # Set as future output of the workflow
- if len(to_extract) == 1:
- out = getattr(principal_op.outputs, f"fields_eig_{to_extract[0]+1}")
- else:
- raise NotImplementedError("Cannot combine principal results yet.")
- # We need to define the behavior for storing different results in a DataFrame
-
- # Add a step to compute equivalent if result is equivalent
- elif category == ResultCategory.equivalent:
- equivalent_op = self._model.operator(name="eqv_fc")
- wf.add_operator(operator=equivalent_op)
- # If a strain result, change the location now
- if (
- average_op is not None
- and category == ResultCategory.equivalent
- and base_name[0] == "E"
- ):
- equivalent_op.connect(0, out)
- average_op[0].connect(0, equivalent_op)
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = average_op[1].outputs.fields_container
- elif average_op is not None:
- average_op[0].connect(0, out)
- equivalent_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = equivalent_op.outputs.fields_container
- else:
- equivalent_op.connect(0, out)
- out = equivalent_op.outputs.fields_container
- average_op = None
- base_name += "_VM"
-
- if average_op is not None:
- average_op[0].connect(0, out)
- wf.add_operators(list(average_op))
- out = average_op[1].outputs.fields_container
-
- # Add an optional component selection step if result is vector, matrix, or principal
- if (category in [ResultCategory.vector, ResultCategory.matrix]) and (
- to_extract is not None
- ):
- # Instantiate a component selector operator
- extract_op = self._model.operator(name="component_selector_fc")
- # Feed it the current workflow output
- extract_op.connect(0, out)
- # Feed it the requested components
- extract_op.connect(1, to_extract)
- wf.add_operator(operator=extract_op)
- # Set as future output of the workflow
- out = extract_op.outputs.fields_container
- if len(to_extract) == 1:
- base_name += f"_{comp[0]}"
- comp = None
-
- # Add an optional norm operation if requested
- if norm:
- wf, out, comp, base_name = self._append_norm(wf, out, base_name)
-
- # Set the workflow output
- wf.set_output_name("out", out)
- wf.progress_bar = False
-
- return wf, comp, base_name
-
def _get_result(
self,
base_name: str,
@@ -186,6 +115,7 @@ def _get_result(
phase_angle_cyclic: Union[float, None] = None,
external_layer: Union[bool, List[int]] = False,
skin: Union[bool, List[int]] = False,
+ averaging_config: AveragingConfig = AveragingConfig(),
) -> DataFrame:
"""Extract results from the simulation.
@@ -251,6 +181,10 @@ def _get_result(
is computed over list of elements (not supported for cyclic symmetry). Getting the
skin on more than one result (several time freq sets, split data...) is only
supported starting with Ansys 2023R2.
+ averaging_config:
+            By default, averaging happens across all bodies. The averaging
+            config can request that averaging happens per body, and it defines
+            the properties that are used to identify a body.
Returns
-------
@@ -273,7 +207,7 @@ def _get_result(
elif tot == 0:
set_ids = 1
- selection = self._build_selection(
+ selection, rescoping = self._build_selection(
base_name=base_name,
category=category,
selection=selection,
@@ -287,6 +221,7 @@ def _get_result(
location=location,
external_layer=external_layer,
skin=skin,
+ average_per_body=averaging_config.average_per_body,
)
wf, comp, base_name = self._get_result_workflow(
@@ -298,10 +233,12 @@ def _get_result(
selection=selection,
expand_cyclic=expand_cyclic,
phase_angle_cyclic=phase_angle_cyclic,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
# Evaluate the workflow
- fc = wf.get_output("out", dpf.types.fields_container)
+ fc = wf.get_output(_WfNames.output_data, dpf.types.fields_container)
disp_wf = self._generate_disp_workflow(fc, selection)
@@ -312,7 +249,7 @@ def _get_result(
_WfNames.mesh, dpf.types.meshed_region
)
- _, _, columns = self._create_components(base_name, category, components)
+ _, _, columns = _create_components(base_name, category, components)
return self._create_dataframe(
fc, location, columns, comp, base_name, disp_wf, submesh
)
diff --git a/src/ansys/dpf/post/post_utility.py b/src/ansys/dpf/post/post_utility.py
index 292c73b17..e686b82cb 100644
--- a/src/ansys/dpf/post/post_utility.py
+++ b/src/ansys/dpf/post/post_utility.py
@@ -44,7 +44,7 @@ def load_solution(data_sources, physics_type=None, analysis_type=None):
data_sources: str, ansys.dpf.core.DataSources
Path to the file to open or the :class:`ansys.dpf.core.DataSources` class.
physics_type: common._PhysicsType, str, optional
- Type of phsyics described in the specified data sources. Options are
+ Type of physics described in the specified data sources. Options are
``"mecanic"`` or ``"thermal"``. The default is ``None``, in which case
the data sources are read to determine the physics type.
analysis_type: common._AnalysisType, str, optional
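
For context, a typical call to this legacy entry point lets DPF inspect the data sources and infer the physics type (the result-file path below is a placeholder):

    from ansys.dpf import post

    # physics_type=None (the default) makes DPF read the data sources to
    # determine the physics type; it can also be forced to "mecanic".
    solution = post.load_solution("model.rst")
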
diff --git a/src/ansys/dpf/post/result_workflows/_build_workflow.py b/src/ansys/dpf/post/result_workflows/_build_workflow.py
new file mode 100644
index 000000000..44316d3a6
--- /dev/null
+++ b/src/ansys/dpf/post/result_workflows/_build_workflow.py
@@ -0,0 +1,296 @@
+import dataclasses
+from typing import Callable, List, Optional, Union
+
+from ansys.dpf.core import Operator, Workflow
+from ansys.dpf.core.available_result import _result_properties
+from ansys.dpf.gate.common import locations
+
+from ansys.dpf.post.result_workflows._component_helper import (
+ ResultCategory,
+ _create_components,
+)
+from ansys.dpf.post.result_workflows._sub_workflows import (
+ _create_averaging_workflow,
+ _create_equivalent_workflow,
+ _create_extract_component_workflow,
+ _create_initial_result_workflow,
+ _create_norm_workflow,
+ _create_principal_workflow,
+ _create_rescoping_workflow,
+ _create_split_scope_by_body_workflow,
+ _create_sweeping_phase_workflow,
+)
+from ansys.dpf.post.result_workflows._utils import (
+ AveragingConfig,
+ _CreateOperatorCallable,
+ _Rescoping,
+)
+from ansys.dpf.post.selection import Selection, _WfNames
+
+
+@dataclasses.dataclass
+class ResultWorkflows:
+ """Contains all the sub-workflows needed to compute a result and some additional information.
+
+ Use _create_result_workflows to create this object.
+    Some workflows are optional. If they are not needed for a particular
+    result, they are set to None.
+ """
+
+ # Workflow to compute the initial result (e.g. stress, displacement, etc.)
+ # The location of this result is always elemental_nodal if force_elemental_nodal is True
+ initial_result_workflow: Workflow
+    # Workflow to average the result. Maps results onto the skin if needed and
+    # averages to the requested location if force_elemental_nodal is True.
+ averaging_workflow: Workflow
+ # The name of the requested result operator with
+ # some modifications (e.g. "_VM" for equivalent stress)
+ base_name: str
+    # If True, the initial_result_workflow requests the result at the elemental
+    # nodal location and the averaging_workflow averages it to the requested
+    # location. This is the case, for instance, for skin results.
+ force_elemental_nodal: bool
+ # If True, the equivalent_workflow is computed before the averaging_workflow
+ compute_equivalent_before_average: bool = False
+ # List of component names at the end of the workflow. If None, the result is a scalar.
+ components: Optional[list[str]] = None
+ # Workflow to compute the principal components of the result
+ principal_workflow: Optional[Workflow] = None
+ # Workflow to compute the equivalent result
+ equivalent_workflow: Optional[Workflow] = None
+    # Workflow to normalize the result
+ norm_workflow: Optional[Workflow] = None
+ # Workflow to extract components of the result
+ component_extraction_workflow: Optional[Workflow] = None
+ # Workflow to sweep the phase of the result
+ sweeping_phase_workflow: Optional[Workflow] = None
+ split_by_bodies_workflow: Optional[Workflow] = None
+ rescoping_workflow: Optional[Workflow] = None
+
+
+@dataclasses.dataclass
+class _AveragingWorkflowInputs:
+ location: Union[locations, str]
+ force_elemental_nodal: bool
+
+
+@dataclasses.dataclass
+class _SweepingPhaseWorkflowInputs:
+    amplitude: bool = False
+    sweeping_phase: Union[float, None] = None
+
+
+@dataclasses.dataclass
+class _CreateWorkflowInputs:
+ averaging_workflow_inputs: _AveragingWorkflowInputs
+ has_skin: bool
+ has_equivalent: bool
+ has_principal: bool
+ has_norm: bool
+ base_name: str
+ component_names: list[str]
+ components_to_extract: list[int]
+ should_extract_components: bool
+ averaging_config: AveragingConfig
+ sweeping_phase_workflow_inputs: Optional[_SweepingPhaseWorkflowInputs] = None
+ rescoping_workflow_inputs: Optional[_Rescoping] = None
+
+
+def _requires_manual_averaging(
+ base_name: str,
+ location: str,
+ category: ResultCategory,
+ has_skin: bool,
+ has_external_layer: bool,
+ create_operator_callable: Callable[[str], Operator],
+ average_per_body: bool,
+):
+ res = _result_properties[base_name] if base_name in _result_properties else None
+ native_location = res["location"] if res is not None else None
+
+ if average_per_body and (
+ native_location == locations.elemental
+ or native_location == locations.elemental_nodal
+ ):
+ return True
+ if category == ResultCategory.equivalent and base_name[0] == "E": # strain eqv
+ return True
+ if res is not None:
+ is_model_cyclic = create_operator_callable("is_cyclic").eval()
+ is_model_cyclic = is_model_cyclic in ["single_stage", "multi_stage"]
+ if has_external_layer and is_model_cyclic and location != native_location:
+ return True
+ elif has_skin and (
+ native_location == locations.elemental
+ or native_location == locations.elemental_nodal
+ ):
+ return True
+ return False
+ return False
+
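+# Hedged example of the rule above: stress ("S") is stored elemental nodal, so
+# requesting it on a skin forces manual averaging, whereas a natively nodal
+# result such as displacement does not:
+#     _requires_manual_averaging(
+#         base_name="S", location=locations.nodal, category=ResultCategory.matrix,
+#         has_skin=True, has_external_layer=False,
+#         create_operator_callable=model.operator, average_per_body=False,
+#     )  # -> True (assuming a non-cyclic model; "model" is a placeholder)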
+
+def _create_result_workflows(
+ server,
+ create_operator_callable: _CreateOperatorCallable,
+ create_workflow_inputs: _CreateWorkflowInputs,
+) -> ResultWorkflows:
+ """Creates all the sub-workflows needed to compute a result.
+
+ The resulting workflows are stored in a ResultWorkflows object.
+ """
+ initial_result_wf = _create_initial_result_workflow(
+ name=create_workflow_inputs.base_name,
+ server=server,
+ create_operator_callable=create_operator_callable,
+ )
+
+ force_elemental_nodal = (
+ create_workflow_inputs.averaging_workflow_inputs.force_elemental_nodal
+ )
+ average_wf = _create_averaging_workflow(
+ location=create_workflow_inputs.averaging_workflow_inputs.location,
+ has_skin=create_workflow_inputs.has_skin,
+ force_elemental_nodal=force_elemental_nodal,
+ create_operator_callable=create_operator_callable,
+ server=server,
+ )
+
+ result_workflows: ResultWorkflows = ResultWorkflows(
+ initial_result_workflow=initial_result_wf,
+ averaging_workflow=average_wf,
+ base_name=create_workflow_inputs.base_name,
+ force_elemental_nodal=force_elemental_nodal,
+ components=create_workflow_inputs.component_names,
+ )
+
+ if create_workflow_inputs.has_principal:
+ result_workflows.principal_workflow = _create_principal_workflow(
+ components_to_extract=create_workflow_inputs.components_to_extract,
+ create_operator_callable=create_operator_callable,
+ server=server,
+ )
+
+ if create_workflow_inputs.has_equivalent:
+ result_workflows.equivalent_workflow = _create_equivalent_workflow(
+ create_operator_callable=create_operator_callable, server=server
+ )
+ result_workflows.base_name += "_VM"
+ # equivalent computation is done before averaging for strain because Mechanical
+ # does it this way (MAPDL has a result named EPEL_EQV in the rst, which
+ # Mechanical uses directly).
+ if create_workflow_inputs.base_name[0] == "E":
+ result_workflows.compute_equivalent_before_average = True
+
+ if create_workflow_inputs.should_extract_components:
+ (
+ extract_component_wf,
+ base_name,
+ result_is_single_component,
+ ) = _create_extract_component_workflow(
+ create_operator_callable=create_operator_callable,
+ components_to_extract=create_workflow_inputs.components_to_extract,
+ component_names=create_workflow_inputs.component_names,
+ base_name=create_workflow_inputs.base_name,
+ server=server,
+ )
+ result_workflows.component_extraction_workflow = extract_component_wf
+ if result_is_single_component:
+ result_workflows.components = None
+ result_workflows.base_name = base_name
+
+ if create_workflow_inputs.has_norm:
+ norm_wf, base_name = _create_norm_workflow(
+ create_operator_callable=create_operator_callable,
+ base_name=create_workflow_inputs.base_name,
+ server=server,
+ )
+ result_workflows.norm_workflow = norm_wf
+ result_workflows.components = None
+ result_workflows.base_name = base_name
+
+ if create_workflow_inputs.sweeping_phase_workflow_inputs is not None:
+ result_workflows.sweeping_phase_workflow = _create_sweeping_phase_workflow(
+ create_operator_callable=create_operator_callable,
+ server=server,
+ amplitude=create_workflow_inputs.sweeping_phase_workflow_inputs.amplitude,
+ sweeping_phase=create_workflow_inputs.sweeping_phase_workflow_inputs.sweeping_phase,
+ )
+
+ avg_config = create_workflow_inputs.averaging_config
+ if avg_config.average_per_body:
+ result_workflows.split_by_bodies_workflow = (
+ _create_split_scope_by_body_workflow(
+ server=server,
+ body_defining_properties=avg_config.body_defining_properties,
+ )
+ )
+
+ if create_workflow_inputs.rescoping_workflow_inputs is not None:
+ result_workflows.rescoping_workflow = _create_rescoping_workflow(
+ server, create_workflow_inputs.rescoping_workflow_inputs
+ )
+
+ return result_workflows
+
+
+def _create_result_workflow_inputs(
+ base_name: str,
+ category: ResultCategory,
+ components: Union[str, List[str], int, List[int], None],
+ location: str,
+ norm: bool,
+ selection: Selection,
+ create_operator_callable: Callable[[str], Operator],
+ averaging_config: AveragingConfig,
+ rescoping: Optional[_Rescoping] = None,
+ amplitude: bool = False,
+ sweeping_phase: Union[float, None] = 0.0,
+) -> _CreateWorkflowInputs:
+ """Creates a CreateWorkflowInputs object to be used to create the result workflows."""
+ component_names, components_to_extract, _ = _create_components(
+ base_name, category, components
+ )
+
+ force_elemental_nodal = _requires_manual_averaging(
+ base_name=base_name,
+ location=location,
+ category=category,
+ has_skin=_WfNames.skin in selection.spatial_selection._selection.output_names,
+ has_external_layer=_WfNames.external_layer
+ in selection.spatial_selection._selection.output_names,
+ create_operator_callable=create_operator_callable,
+ average_per_body=averaging_config.average_per_body,
+ )
+
+ averaging_workflow_inputs = _AveragingWorkflowInputs(
+ location=location,
+ force_elemental_nodal=force_elemental_nodal,
+ )
+
+ has_principal = category == ResultCategory.principal
+
+ should_extract_components = (
+ category in [ResultCategory.vector, ResultCategory.matrix]
+ ) and components_to_extract is not None
+
+ sweeping_phase_workflow_inputs: Optional[_SweepingPhaseWorkflowInputs] = None
+ if amplitude or sweeping_phase is not None:
+ sweeping_phase_workflow_inputs = _SweepingPhaseWorkflowInputs(
+ amplitude=amplitude,
+ sweeping_phase=sweeping_phase,
+ )
+
+ return _CreateWorkflowInputs(
+ base_name=base_name,
+ averaging_workflow_inputs=averaging_workflow_inputs,
+ has_skin=_WfNames.skin in selection.spatial_selection._selection.output_names,
+ has_norm=norm,
+ component_names=component_names,
+ components_to_extract=components_to_extract,
+ should_extract_components=should_extract_components,
+ has_principal=has_principal,
+ has_equivalent=category == ResultCategory.equivalent,
+ sweeping_phase_workflow_inputs=sweeping_phase_workflow_inputs,
+ averaging_config=averaging_config,
+ rescoping_workflow_inputs=rescoping,
+ )
diff --git a/src/ansys/dpf/post/result_workflows/_component_helper.py b/src/ansys/dpf/post/result_workflows/_component_helper.py
new file mode 100644
index 000000000..7803f67cd
--- /dev/null
+++ b/src/ansys/dpf/post/result_workflows/_component_helper.py
@@ -0,0 +1,150 @@
+from enum import Enum
+
+
+class ResultCategory(Enum):
+ """Enum for available result categories."""
+
+ scalar = 1
+ vector = 2
+ matrix = 3
+ principal = 4
+ equivalent = 5
+
+
+_component_label_to_index = {
+ "1": 0,
+ "2": 1,
+ "3": 2,
+ "4": 3,
+ "5": 4,
+ "6": 5,
+ "X": 0,
+ "Y": 1,
+ "Z": 2,
+ "XX": 0,
+ "YY": 1,
+ "ZZ": 2,
+ "XY": 3,
+ "YZ": 4,
+ "XZ": 5,
+}
+
+_vector_component_names = ["X", "Y", "Z"]
+_matrix_component_names = ["XX", "YY", "ZZ", "XY", "YZ", "XZ"]
+_principal_names = ["1", "2", "3"]
+
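+# For example, both "3" and "Z" map to index 2, and "XY" maps to index 3 in the
+# 6-component matrix ordering above.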
+
+def _build_components_for_vector(base_name, components):
+ out, columns = _build_components(base_name, components, _vector_component_names)
+ return out, columns
+
+
+def _build_components_for_matrix(base_name, components):
+ out, columns = _build_components(base_name, components, _matrix_component_names)
+ return out, columns
+
+
+def _build_components(base_name, components, component_names):
+ # Create operator internal names based on components
+ out = []
+ if components is None:
+ out = None
+ else:
+ if isinstance(components, int) or isinstance(components, str):
+ components = [components]
+ if not isinstance(components, list):
+ raise ValueError(
+ "Argument 'components' must be an int, a str, or a list of either."
+ )
+ for comp in components:
+ if not (isinstance(comp, str) or isinstance(comp, int)):
+ raise ValueError(
+ "Argument 'components' can only contain integers and/or strings.\n"
+ f"The provided component '{comp}' is not valid."
+ )
+ if isinstance(comp, int):
+ comp = str(comp)
+ if comp not in _component_label_to_index.keys():
+ raise ValueError(
+ f"Component {comp} is not valid. Please use one of: "
+ f"{list(_component_label_to_index.keys())}."
+ )
+ out.append(_component_label_to_index[comp])
+
+ # Take unique values and build names list
+ if out is None:
+ columns = [base_name + comp for comp in component_names]
+ else:
+ out = list(set(out))
+ columns = [base_name + component_names[i] for i in out]
+ return out, columns
+
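+# Hedged example: for a vector result with base name "U",
+#     _build_components("U", ["X", "Z"], _vector_component_names)
+# returns ([0, 2], ["UX", "UZ"]), while components=None yields
+# (None, ["UX", "UY", "UZ"]). Duplicates are dropped via set(), so the
+# output order of the indices is not guaranteed.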
+
+def _build_components_for_principal(base_name, components):
+ # Create operator internal names based on principal components
+ out = []
+ if components is None:
+ components = [1]
+
+ if isinstance(components, int) or isinstance(components, str):
+ components = [components]
+ if not isinstance(components, list):
+ raise ValueError(
+ "Argument 'components' must be an int, a str, or a list of either."
+ )
+ for comp in components:
+ if not (isinstance(comp, str) or isinstance(comp, int)):
+ raise ValueError(
+ "Argument 'components' can only contain integers and/or strings."
+ )
+ if str(comp) not in _principal_names:
+ raise ValueError(
+ "A principal component ID must be one of: " f"{_principal_names}."
+ )
+ out.append(int(comp) - 1)
+
+ # Take unique values
+ if out is not None:
+ out = list(set(out))
+ # Build columns names
+ if out is None:
+ columns = [base_name + str(comp) for comp in _principal_names]
+ else:
+ columns = [base_name + _principal_names[i] for i in out]
+ return out, columns
+
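+# Hedged example: _build_components_for_principal("S", None) defaults to the
+# first principal component and returns ([0], ["S1"]);
+# _build_components_for_principal("S", [1, 3]) returns ([0, 2], ["S1", "S3"])
+# (up to set() ordering).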
+
+def _create_components(base_name: str, category: ResultCategory, components):
+ comp = None
+ # Build the list of requested results
+ if category in [ResultCategory.scalar, ResultCategory.equivalent]:
+ # A scalar or equivalent result has no components
+ to_extract = None
+ columns = [base_name]
+ elif category == ResultCategory.vector:
+ # A vector result can have components selected
+ to_extract, columns = _build_components_for_vector(
+ base_name=base_name, components=components
+ )
+ if to_extract is not None:
+ comp = [_vector_component_names[i] for i in to_extract]
+ else:
+ comp = _vector_component_names
+ elif category == ResultCategory.matrix:
+ # A matrix result can have components selected
+ to_extract, columns = _build_components_for_matrix(
+ base_name=base_name, components=components
+ )
+ if to_extract is not None:
+ comp = [_matrix_component_names[i] for i in to_extract]
+ else:
+ comp = _matrix_component_names
+ elif category == ResultCategory.principal:
+ # A principal type of result can have components selected
+ to_extract, columns = _build_components_for_principal(
+ base_name=base_name, components=components
+ )
+ comp = [_principal_names[i] for i in to_extract]
+ else:
+ raise ValueError(f"'{category}' is not a valid category value.")
+ return comp, to_extract, columns
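+
+
+# Hedged example: _create_components("U", ResultCategory.vector, "X") returns
+# (["X"], [0], ["UX"]), while _create_components("S", ResultCategory.scalar, None)
+# returns (None, None, ["S"]).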
diff --git a/src/ansys/dpf/post/result_workflows/_connect_workflow_inputs.py b/src/ansys/dpf/post/result_workflows/_connect_workflow_inputs.py
new file mode 100644
index 000000000..fff590a59
--- /dev/null
+++ b/src/ansys/dpf/post/result_workflows/_connect_workflow_inputs.py
@@ -0,0 +1,204 @@
+from typing import Any, Optional
+
+from ansys.dpf.core import MeshedRegion, Scoping, ScopingsContainer, Workflow
+
+from ansys.dpf.post.result_workflows._build_workflow import ResultWorkflows
+from ansys.dpf.post.result_workflows._sub_workflows import (
+ _enrich_mesh_with_property_fields,
+)
+from ansys.dpf.post.result_workflows._utils import AveragingConfig
+from ansys.dpf.post.selection import Selection, _WfNames
+
+
+def _connect_cyclic_inputs(expand_cyclic, phase_angle_cyclic, result_wf: Workflow):
+ if expand_cyclic is not False:
+ # If expand_cyclic is a list
+ if isinstance(expand_cyclic, list) and len(expand_cyclic) > 0:
+ # If a list of sector numbers, directly connect it to the num_sectors pin
+ if all(
+ [isinstance(expand_cyclic_i, int) for expand_cyclic_i in expand_cyclic]
+ ):
+ if any([i < 1 for i in expand_cyclic]):
+ raise ValueError(
+ "Sector selection with 'expand_cyclic' starts at 1."
+ )
+ result_wf.connect(
+ _WfNames.cyclic_sectors_to_expand,
+ [i - 1 for i in expand_cyclic],
+ )
+ # If any is a list, treat it as per stage num_sectors
+ elif any(
+ [isinstance(expand_cyclic_i, list) for expand_cyclic_i in expand_cyclic]
+ ):
+ # Create a ScopingsContainer to fill
+ sectors_scopings = ScopingsContainer()
+ sectors_scopings.labels = ["stage"]
+ # For each stage, accept either an int or a list of ints as sector numbers
+ for i, num_sectors_stage_i in enumerate(expand_cyclic):
+ # Prepare num_sectors data
+ if isinstance(num_sectors_stage_i, int):
+ num_sectors_stage_i = [num_sectors_stage_i]
+ elif isinstance(num_sectors_stage_i, list):
+ if not all([isinstance(n, int) for n in num_sectors_stage_i]):
+ raise ValueError(
+ "'expand_cyclic' only accepts lists of int values >= 1."
+ )
+ # num_sectors_stage_i is now a list of int,
+ # add an equivalent Scoping with the correct 'stage' label value
+ if any([i < 1 for i in num_sectors_stage_i]):
+ raise ValueError(
+ "Sector selection with 'expand_cyclic' starts at 1."
+ )
+ sectors_scopings.add_scoping(
+ {"stage": i},
+ Scoping(ids=[i - 1 for i in num_sectors_stage_i]),
+ )
+ result_wf.connect(
+ _WfNames.cyclic_sectors_to_expand, inpt=sectors_scopings
+ )
+ elif not isinstance(expand_cyclic, bool):
+ raise ValueError(
+ "'expand_cyclic' argument can only be a boolean or a list."
+ )
+ result_wf.connect(_WfNames.read_cyclic, 3) # Connect the read_cyclic pin
+ elif _WfNames.read_cyclic in result_wf.input_names:
+ result_wf.connect(_WfNames.read_cyclic, 1) # Connect the read_cyclic pin
+ if phase_angle_cyclic is not None:
+ if isinstance(phase_angle_cyclic, int):
+ phase_angle_cyclic = float(phase_angle_cyclic)
+ if not isinstance(phase_angle_cyclic, float):
+ raise ValueError(
+ "'phase_angle_cyclic' argument only accepts a single float value."
+ )
+ result_wf.connect(_WfNames.cyclic_phase, phase_angle_cyclic)
+
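+# The accepted 'expand_cyclic' forms, as handled above (illustrative):
+#     expand_cyclic=True          -> full expansion (read_cyclic pin set to 3)
+#     expand_cyclic=[1, 2]        -> expand sectors 1 and 2 (1-based)
+#     expand_cyclic=[[1, 2], 1]   -> per-stage sector selection
+#     expand_cyclic=False         -> no expansion (read_cyclic pin set to 1)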
+
+def _connect_workflow_inputs(
+ initial_result_workflow: Workflow,
+ split_by_body_workflow: Optional[Workflow],
+ rescoping_workflow: Optional[Workflow],
+ force_elemental_nodal: bool,
+ location: str,
+ selection: Selection,
+ expand_cyclic: bool,
+ phase_angle_cyclic: Any,
+ mesh: MeshedRegion,
+ streams_provider: Any,
+ data_sources: Any,
+ averaging_config: AveragingConfig,
+):
+ """Connects the inputs of the initial result workflow.
+
+ The initial result workflow is the first workflow in the result workflows chain, which
+ extracts the raw results from the data sources.
+ """
+ selection_wf = selection.spatial_selection._selection
+
+ if selection.spatial_selection.requires_mesh:
+ selection_wf.connect(_WfNames.initial_mesh, mesh)
+
+ if averaging_config.average_per_body:
+ _enrich_mesh_with_property_fields(
+ mesh, averaging_config.body_defining_properties, streams_provider
+ )
+
+ if split_by_body_workflow is not None:
+ split_by_body_workflow.connect(_WfNames.mesh, mesh)
+ if force_elemental_nodal:
+ split_by_body_workflow.connect(_WfNames.scoping_location, "ElementalNodal")
+ else:
+ split_by_body_workflow.connect(_WfNames.scoping_location, location)
+ split_by_body_workflow.connect_with(
+ selection_wf, output_input_names={_WfNames.scoping: _WfNames.scoping}
+ )
+ selection_wf = split_by_body_workflow
+
+ initial_result_workflow.connect_with(
+ selection_wf,
+ output_input_names={"scoping": "mesh_scoping"},
+ )
+
+ initial_result_workflow.connect_with(
+ selection.time_freq_selection._selection,
+ output_input_names=("scoping", "time_scoping"),
+ )
+
+ # Connect the data_sources and streams_container inputs of the initial result
+ # workflow if necessary.
+ # Note: the streams and data_sources inputs may also be inherited from the
+ # selection workflow connected above.
+ if (
+ "streams" in initial_result_workflow.input_names
+ and streams_provider is not None
+ ):
+ initial_result_workflow.connect("streams", streams_provider)
+ if "data_sources" in initial_result_workflow.input_names:
+ initial_result_workflow.connect("data_sources", data_sources)
+
+ _connect_cyclic_inputs(
+ expand_cyclic=expand_cyclic,
+ phase_angle_cyclic=phase_angle_cyclic,
+ result_wf=initial_result_workflow,
+ )
+
+ if force_elemental_nodal:
+ initial_result_workflow.connect(_WfNames.location, "ElementalNodal")
+ elif location:
+ initial_result_workflow.connect(_WfNames.location, location)
+
+ initial_result_workflow.connect(_WfNames.mesh, mesh)
+
+ if rescoping_workflow:
+ rescoping_workflow.connect(_WfNames.mesh, mesh)
+ if _WfNames.data_sources in rescoping_workflow.input_names:
+ rescoping_workflow.connect(_WfNames.data_sources, data_sources)
+
+
+def _connect_averaging_eqv_and_principal_workflows(
+ result_workflows: ResultWorkflows,
+):
+ """Connects the averaging, equivalent, and principal workflows.
+
+ The order of these workflows depends on result_workflows.compute_equivalent_before_average.
+ Only one of equivalent_workflow or principal_workflow can be active at the same time.
+ """
+ averaging_wf_connections = {
+ _WfNames.output_data: _WfNames.input_data,
+ _WfNames.skin: _WfNames.skin,
+ _WfNames.skin_input_mesh: _WfNames.skin_input_mesh,
+ }
+ assert not (
+ result_workflows.equivalent_workflow is not None
+ and result_workflows.principal_workflow is not None
+ )
+
+ principal_or_eqv_wf = (
+ result_workflows.equivalent_workflow or result_workflows.principal_workflow
+ )
+
+ if not result_workflows.compute_equivalent_before_average:
+ result_workflows.averaging_workflow.connect_with(
+ result_workflows.initial_result_workflow,
+ output_input_names=averaging_wf_connections,
+ )
+ if principal_or_eqv_wf is not None:
+ principal_or_eqv_wf.connect_with(
+ result_workflows.averaging_workflow,
+ output_input_names={_WfNames.output_data: _WfNames.input_data},
+ )
+ output_wf = principal_or_eqv_wf
+ else:
+ output_wf = result_workflows.averaging_workflow
+
+ else:
+ assert principal_or_eqv_wf is not None
+ principal_or_eqv_wf.connect_with(
+ result_workflows.initial_result_workflow,
+ output_input_names={_WfNames.output_data: _WfNames.input_data},
+ )
+ result_workflows.averaging_workflow.connect_with(
+ principal_or_eqv_wf,
+ output_input_names=averaging_wf_connections,
+ )
+ output_wf = result_workflows.averaging_workflow
+
+ return output_wf
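+
+
+# Resulting chains, schematically:
+#     default:    initial_result -> averaging [-> principal or equivalent]
+#     strain eqv (compute_equivalent_before_average=True):
+#                 initial_result -> equivalent -> averaging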
diff --git a/src/ansys/dpf/post/result_workflows/_sub_workflows.py b/src/ansys/dpf/post/result_workflows/_sub_workflows.py
new file mode 100644
index 000000000..d8b935728
--- /dev/null
+++ b/src/ansys/dpf/post/result_workflows/_sub_workflows.py
@@ -0,0 +1,311 @@
+from typing import Union
+
+from ansys.dpf.core import MeshedRegion, StreamsContainer, Workflow, operators
+from ansys.dpf.gate.common import locations
+
+from ansys.dpf.post.misc import _connect_any
+from ansys.dpf.post.result_workflows._utils import _CreateOperatorCallable, _Rescoping
+from ansys.dpf.post.selection import SpatialSelection, _WfNames
+
+
+def _create_averaging_workflow(
+ has_skin: bool,
+ location: str,
+ force_elemental_nodal: bool,
+ create_operator_callable: _CreateOperatorCallable,
+ server,
+):
+ average_wf = Workflow(server=server)
+
+ input_data_fwd = create_operator_callable(name="forward_fc")
+ averaged_data_fwd = create_operator_callable(name="forward_fc")
+
+ mesh_averaging_input_fwd = create_operator_callable(name="forward_fc")
+ average_wf.add_operators(
+ [input_data_fwd, averaged_data_fwd, mesh_averaging_input_fwd]
+ )
+ mesh_averaging_input_fwd.connect(0, input_data_fwd, 0)
+
+ average_wf.set_input_name(_WfNames.input_data, input_data_fwd)
+ average_wf.set_output_name(_WfNames.output_data, averaged_data_fwd, 0)
+
+ skin_mesh_fwd = create_operator_callable(name="forward")
+
+ map_to_skin = True
+ if not server.meet_version("8.0"):
+ # Before 8.0, the skin mapping was only supported
+ # for elemental nodal results, not for elemental or nodal results.
+ # In the nodal case the mapping is not needed, but we still
+ # call the operator to be consistent.
+ map_to_skin = force_elemental_nodal
+
+ if has_skin and map_to_skin:
+ average_wf.add_operator(skin_mesh_fwd)
+ average_wf.set_input_name(_WfNames.skin, skin_mesh_fwd)
+
+ if server.meet_version("6.2"):
+ solid_to_skin_operator = create_operator_callable(name="solid_to_skin_fc")
+ else:
+ solid_to_skin_operator = create_operator_callable(name="solid_to_skin")
+
+ average_wf.add_operator(solid_to_skin_operator)
+ solid_to_skin_operator.connect(0, input_data_fwd, 0)
+ mesh_averaging_input_fwd.connect(0, solid_to_skin_operator, 0)
+
+ if hasattr(solid_to_skin_operator.inputs, "mesh_scoping"):
+ _connect_any(solid_to_skin_operator.inputs.mesh_scoping, skin_mesh_fwd)
+ else:
+ # Kept for retro-compatibility with older servers that expose a "mesh" input
+ _connect_any(solid_to_skin_operator.inputs.mesh, skin_mesh_fwd)
+
+ if server.meet_version("8.0"):
+ # The solid mesh input is only supported for server versions
+ # 8.0 and up.
+ average_wf.set_input_name(
+ _WfNames.skin_input_mesh, solid_to_skin_operator.inputs.solid_mesh
+ )
+
+ if (
+ location == locations.nodal or location == locations.elemental
+ ) and force_elemental_nodal:
+ if location == locations.nodal:
+ operator_name = "to_nodal_fc"
+ else:
+ operator_name = "to_elemental_fc"
+ mesh_average_op = create_operator_callable(name=operator_name)
+ average_wf.add_operator(mesh_average_op)
+ mesh_average_op.connect(0, mesh_averaging_input_fwd, 0)
+ averaged_data_fwd.connect(0, mesh_average_op, 0)
+ else:
+ averaged_data_fwd.connect(0, mesh_averaging_input_fwd, 0)
+
+ return average_wf
+
+
+def _create_principal_workflow(
+ components_to_extract: list[int],
+ create_operator_callable: _CreateOperatorCallable,
+ server,
+):
+ principal_wf = Workflow(server=server)
+
+ # Instantiate the required operator
+ principal_op = create_operator_callable(name="invariants_fc")
+ principal_wf.add_operator(principal_op)
+ principal_wf.set_input_name(_WfNames.input_data, principal_op)
+ # Set as future output of the workflow
+ if len(components_to_extract) == 1:
+ principal_output = getattr(
+ principal_op.outputs, f"fields_eig_{components_to_extract[0] + 1}"
+ )
+ principal_wf.set_output_name(_WfNames.output_data, principal_output)
+ else:
+ raise NotImplementedError("Cannot combine principal results yet.")
+ # We need to define the behavior for storing different results in a DataFrame
+
+ return principal_wf
+
+
+def _create_equivalent_workflow(
+ create_operator_callable: _CreateOperatorCallable, server
+):
+ equivalent_wf = Workflow(server=server)
+ equivalent_op = create_operator_callable(name="eqv_fc")
+ equivalent_wf.add_operator(operator=equivalent_op)
+ equivalent_wf.set_input_name(_WfNames.input_data, equivalent_op)
+ equivalent_wf.set_output_name(
+ _WfNames.output_data, equivalent_op.outputs.fields_container
+ )
+ return equivalent_wf
+
+
+def _create_extract_component_workflow(
+ create_operator_callable: _CreateOperatorCallable,
+ components_to_extract: list[int],
+ component_names: list[str],
+ base_name: str,
+ server,
+):
+ extract_component_wf = Workflow(server=server)
+
+ # Instantiate a component selector operator
+ extract_op = create_operator_callable(name="component_selector_fc")
+ # Feed it the current workflow output
+ extract_component_wf.set_input_name(_WfNames.input_data, extract_op)
+
+ # Feed it the requested components
+ extract_op.connect(1, components_to_extract)
+ extract_component_wf.add_operator(operator=extract_op)
+ # Set as future output of the workflow
+ extract_component_wf.set_output_name(
+ _WfNames.output_data, extract_op.outputs.fields_container
+ )
+
+ result_is_single_component = False
+ if len(components_to_extract) == 1:
+ new_base_name = base_name + f"_{component_names[0]}"
+ result_is_single_component = True
+ else:
+ new_base_name = base_name
+
+ return extract_component_wf, new_base_name, result_is_single_component
+
+
+def _create_norm_workflow(
+ create_operator_callable: _CreateOperatorCallable, base_name: str, server
+):
+ norm_wf = Workflow(server=server)
+ norm_op = create_operator_callable(name="norm_fc")
+ norm_wf.add_operator(operator=norm_op)
+ norm_wf.set_input_name(_WfNames.input_data, norm_op)
+ norm_wf.set_output_name(_WfNames.output_data, norm_op)
+ new_base_name = base_name + "_N"
+ return norm_wf, new_base_name
+
+
+def _create_initial_result_workflow(
+ name: str, server, create_operator_callable: _CreateOperatorCallable
+):
+ initial_result_workflow = Workflow(server=server)
+
+ initial_result_op = create_operator_callable(name=name)
+ initial_result_workflow.set_input_name(_WfNames.mesh, initial_result_op, 7)
+ initial_result_workflow.set_input_name(_WfNames.location, initial_result_op, 9)
+
+ initial_result_workflow.add_operator(initial_result_op)
+ initial_result_workflow.set_output_name(_WfNames.output_data, initial_result_op, 0)
+ initial_result_workflow.set_input_name(
+ "time_scoping", initial_result_op.inputs.time_scoping
+ )
+ initial_result_workflow.set_input_name(
+ "mesh_scoping", initial_result_op.inputs.mesh_scoping
+ )
+
+ initial_result_workflow.set_input_name(_WfNames.read_cyclic, initial_result_op, 14)
+ initial_result_workflow.set_input_name(
+ _WfNames.cyclic_sectors_to_expand, initial_result_op, 18
+ )
+ initial_result_workflow.set_input_name(_WfNames.cyclic_phase, initial_result_op, 19)
+
+ return initial_result_workflow
+
+
+def _create_sweeping_phase_workflow(
+ create_operator_callable: _CreateOperatorCallable,
+ server,
+ amplitude: bool,
+ sweeping_phase: Union[float, None],
+):
+ sweeping_phase_workflow = Workflow(server=server)
+ # Add an optional sweeping phase or amplitude operation if requested
+ # (must be after comp_selector for U)
+ # (must be before norm operation for U)
+ if sweeping_phase is not None and not amplitude:
+ if isinstance(sweeping_phase, int):
+ sweeping_phase = float(sweeping_phase)
+ if not isinstance(sweeping_phase, float):
+ raise ValueError("Argument sweeping_phase must be a float.")
+ sweeping_op = create_operator_callable(name="sweeping_phase_fc")
+ sweeping_op.connect(2, sweeping_phase)
+ sweeping_op.connect(3, "degree")
+ sweeping_op.connect(4, False)
+ sweeping_phase_workflow.add_operator(operator=sweeping_op)
+
+ sweeping_phase_workflow.set_input_name(_WfNames.input_data, sweeping_op)
+ sweeping_phase_workflow.set_output_name(_WfNames.output_data, sweeping_op)
+ elif amplitude:
+ amplitude_op = create_operator_callable(name="amplitude_fc")
+ sweeping_phase_workflow.add_operator(operator=amplitude_op)
+ sweeping_phase_workflow.set_input_name(_WfNames.input_data, amplitude_op)
+ sweeping_phase_workflow.set_output_name(_WfNames.output_data, amplitude_op)
+ else:
+ return None
+
+ return sweeping_phase_workflow
+
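+# Hedged usage notes: _create_sweeping_phase_workflow(..., amplitude=False,
+# sweeping_phase=30.0) wraps a "sweeping_phase_fc" operator configured in
+# degrees; amplitude=True wraps "amplitude_fc" instead; with amplitude=False
+# and sweeping_phase=None the function returns None and no workflow is chained.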
+
+def _enrich_mesh_with_property_fields(
+ mesh: MeshedRegion,
+ property_names: list[str],
+ streams_provider: StreamsContainer,
+):
+ property_operator = operators.metadata.property_field_provider_by_name()
+ property_operator.inputs.streams_container(streams_provider)
+
+ for property_name in property_names:
+ # Some of the requested properties might already be part of the mesh
+ # property fields
+ if property_name not in mesh.available_property_fields:
+ property_operator.inputs.property_name(property_name)
+ property_field = property_operator.eval()
+
+ # Rescope the property field to the element scoping of the mesh
+ # to ensure the split by property operator works correctly
+ rescope_op = operators.scoping.rescope_property_field(
+ mesh_scoping=mesh.elements.scoping, fields=property_field
+ )
+
+ mesh.set_property_field(
+ property_name, rescope_op.outputs.fields_as_property_field()
+ )
+
+
+def _create_split_scope_by_body_workflow(server, body_defining_properties: list[str]):
+ split_scope_by_body_wf = Workflow(server=server)
+ split_scope_op = operators.scoping.split_on_property_type()
+ split_scope_by_body_wf.add_operator(split_scope_op)
+ split_scope_by_body_wf.set_input_name(_WfNames.mesh, split_scope_op.inputs.mesh)
+ split_scope_by_body_wf.set_input_name(
+ _WfNames.scoping_location, split_scope_op.inputs.requested_location
+ )
+ split_scope_by_body_wf.set_input_name(
+ _WfNames.scoping, split_scope_op.inputs.mesh_scoping
+ )
+
+ for idx, property_name in enumerate(body_defining_properties):
+ split_scope_op.connect(13 + idx, property_name)
+ split_scope_by_body_wf.set_output_name(
+ _WfNames.scoping, split_scope_op.outputs.mesh_scoping
+ )
+ return split_scope_by_body_wf
+
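+# Note on the loop above: the property names are connected to consecutive pins
+# starting at pin 13, so body_defining_properties=["mat"] connects pin 13 and a
+# second property would go to pin 14.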
+
+def _create_rescoping_workflow(server, rescoping: _Rescoping):
+ selection = SpatialSelection(server=server)
+
+ if rescoping.named_selections is not None:
+ selection.select_named_selection(rescoping.named_selections)
+
+ if rescoping.node_ids is not None:
+ selection.select_nodes(rescoping.node_ids)
+
+ rescoping_wf = Workflow(server=server)
+
+ transpose_scoping_op = operators.scoping.transpose()
+ rescoping_wf.add_operator(transpose_scoping_op)
+ transpose_scoping_op.inputs.requested_location(rescoping.requested_location)
+ rescoping_wf.set_input_name(
+ _WfNames.mesh, transpose_scoping_op.inputs.meshed_region
+ )
+
+ rescoping_op = operators.scoping.rescope_fc()
+ rescoping_wf.add_operator(rescoping_op)
+ rescoping_op.inputs.mesh_scoping(
+ transpose_scoping_op.outputs.mesh_scoping_as_scoping
+ )
+ rescoping_wf.set_input_name(
+ _WfNames.input_data, rescoping_op.inputs.fields_container
+ )
+ rescoping_wf.set_input_name(
+ _WfNames.scoping, transpose_scoping_op.inputs.mesh_scoping
+ )
+ rescoping_wf.set_output_name(
+ _WfNames.output_data, rescoping_op.outputs.fields_container
+ )
+
+ rescoping_wf.connect_with(
+ selection._selection, output_input_names={_WfNames.scoping: _WfNames.scoping}
+ )
+
+ return rescoping_wf
diff --git a/src/ansys/dpf/post/result_workflows/_utils.py b/src/ansys/dpf/post/result_workflows/_utils.py
new file mode 100644
index 000000000..05c40c579
--- /dev/null
+++ b/src/ansys/dpf/post/result_workflows/_utils.py
@@ -0,0 +1,92 @@
+import dataclasses
+from typing import Optional, Protocol
+
+from ansys.dpf.core import Operator, Workflow
+
+from ansys.dpf.post.selection import _WfNames
+
+
+class _CreateOperatorCallable(Protocol):
+ # Callable to create an operator with a given name.
+ # This usually corresponds to model.operator
+ def __call__(self, name: str) -> Operator:
+ ...
+
+
+class _Rescoping:
+ # Defines a rescoping that needs to be performed at the end
+ # of the result workflows chain. This is needed because the scoping
+ # sometimes has to be broadened when force_elemental_nodal is True.
+ def __init__(
+ self,
+ requested_location: str,
+ named_selections: Optional[list[str]] = None,
+ node_ids: Optional[list[int]] = None,
+ ):
+ if named_selections is not None and node_ids is not None:
+ raise ValueError(
+ "Arguments named_selections and node_ids are mutually exclusive"
+ )
+ if named_selections is None and node_ids is None:
+ raise ValueError(
+ "At least one of named_selections and node_ids must be provided"
+ )
+ self._node_ids = node_ids
+ self._named_selections = named_selections
+ self._requested_location = requested_location
+
+ @property
+ def node_ids(self):
+ return self._node_ids
+
+ @property
+ def named_selections(self):
+ return self._named_selections
+
+ @property
+ def requested_location(self):
+ return self._requested_location
+
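+# Hedged usage sketch: exactly one of node_ids or named_selections must be set,
+# e.g. _Rescoping(requested_location="Nodal", node_ids=[1, 2, 3]);
+# passing both (or neither) raises a ValueError.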
+
+@dataclasses.dataclass
+class AveragingConfig:
+ """Configuration for averaging of results."""
+
+ # List of properties that define a body. The mesh is split by these properties to
+ # get the bodies.
+ body_defining_properties: Optional[list[str]] = None
+ # If True, the results are averaged per body. The bodies are determined
+ # by the body_defining_properties.
+ average_per_body: bool = False
+
+
+def _append_workflows(workflows: list[Workflow], current_output_workflow: Workflow):
+ # Append multiple workflows, in order, to current_output_workflow. Each new
+ # workflow must have _WfNames.input_data and _WfNames.output_data pins, and
+ # current_output_workflow must have an _WfNames.output_data pin.
+ # Workflows that are None are skipped.
+ # Returns the last appended workflow (or current_output_workflow if none).
+ for workflow in workflows:
+ current_output_workflow = _append_workflow(
+ new_wf=workflow, last_wf=current_output_workflow
+ )
+ return current_output_workflow
+
+
+def _append_workflow(new_wf: Optional[Workflow], last_wf: Workflow):
+ # Append a single workflow to last_wf. The new
+ # workflow must have an _WfNames.input_data pin and the last_wf
+ # must have an _WfNames.output_data pin.
+ # Returns the appended workflow if it was not None, otherwise returns last_wf.
+ if new_wf is None:
+ return last_wf
+
+ assert _WfNames.input_data in new_wf.input_names
+ assert _WfNames.output_data in new_wf.output_names
+ assert _WfNames.output_data in last_wf.output_names
+ new_wf.connect_with(
+ last_wf,
+ output_input_names={_WfNames.output_data: _WfNames.input_data},
+ )
+ return new_wf
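+
+
+# Hedged example of chaining optional steps (None entries are skipped):
+#     last_wf = _append_workflows(
+#         [workflows.equivalent_workflow, workflows.norm_workflow],
+#         workflows.averaging_workflow,
+#     )
+# where "workflows" is a ResultWorkflows instance built elsewhere.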
diff --git a/src/ansys/dpf/post/selection.py b/src/ansys/dpf/post/selection.py
index 9d19bd781..321431e8e 100644
--- a/src/ansys/dpf/post/selection.py
+++ b/src/ansys/dpf/post/selection.py
@@ -8,6 +8,8 @@
from typing import TYPE_CHECKING, List
+from ansys.dpf.post.misc import _connect_any
+
if TYPE_CHECKING: # pragma: no cover
from ansys.dpf.post.simulation import Simulation
from ansys.dpf.post.mesh import Mesh
@@ -33,18 +35,23 @@
class _WfNames:
data_sources = "data_sources"
scoping = "scoping"
+ skin_input_mesh = "skin_input_mesh"
final_scoping = "final_scoping"
scoping_a = "scoping_a"
scoping_b = "scoping_b"
streams = "streams"
initial_mesh = "initial_mesh"
mesh = "mesh"
+ location = "location"
+ scoping_location = "scoping_location"
external_layer = "external_layer"
skin = "skin"
read_cyclic = "read_cyclic"
cyclic_sectors_to_expand = "cyclic_sectors_to_expand"
cyclic_phase = "cyclic_phase"
result = "result"
+ input_data = "input_data"
+ output_data = "output_data"
def _is_model_cyclic(is_cyclic: str):
@@ -227,6 +234,7 @@ def select_named_selection(
self,
named_selection: Union[str, List[str]],
location: Union[str, locations, None] = None,
+ inclusive: bool = False,
) -> None:
"""Select a mesh scoping corresponding to one or several named selections.
@@ -239,12 +247,18 @@ def select_named_selection(
Location of the mesh entities to extract results at. Available locations are listed in
class:`post.locations` and are: `post.locations.nodal` or
`post.locations.elemental`.
+ inclusive:
+ If True and the named selection is nodal,
+ include all elements that touch a node. If False, include only elements
+ that share all the nodes in the scoping.
"""
+ int_inclusive = 1 if inclusive else 0
if isinstance(named_selection, str):
op = operators.scoping.on_named_selection(
requested_location=location,
named_selection_name=named_selection,
server=self._server,
+ int_inclusive=int_inclusive,
)
self._selection.add_operator(op)
self._selection.set_input_name(
@@ -266,6 +280,7 @@ def select_named_selection(
requested_location=location,
named_selection_name=ns,
server=self._server,
+ int_inclusive=int_inclusive
# data_sources=forward_ds.outputs.any,
# streams_container=forward_sc.outputs.any,
)
@@ -404,9 +419,18 @@ def select_skin(
be returned by the Operator ``operators.metadata.is_cyclic``. Used to get the skin
on the expanded mesh.
"""
- op = operators.mesh.skin(server=self._server)
- self._selection.add_operator(op)
- mesh_input = op.inputs.mesh
+ skin_operator = operators.mesh.skin(server=self._server)
+ self._selection.add_operator(skin_operator)
+
+ initial_mesh_fwd_op = operators.utility.forward(server=self._server)
+ self._selection.set_input_name(
+ _WfNames.initial_mesh, initial_mesh_fwd_op.inputs.any
+ )
+ self._selection.add_operator(initial_mesh_fwd_op)
+
+ skin_operator_input_mesh_fwd_op = operators.utility.forward(server=self._server)
+ _connect_any(skin_operator_input_mesh_fwd_op.inputs.any, initial_mesh_fwd_op)
+ self._selection.add_operator(skin_operator_input_mesh_fwd_op)
if _is_model_cyclic(is_model_cyclic):
mesh_provider_cyc = operators.mesh.mesh_provider()
@@ -436,13 +460,11 @@ def select_skin(
server=self._server,
)
self._selection.add_operator(mesh_by_scop_op)
- op.inputs.mesh.connect(mesh_by_scop_op)
+ skin_operator_input_mesh_fwd_op.inputs.any(mesh_by_scop_op)
else:
- op.inputs.mesh.connect(mesh_provider_cyc)
- self._selection.set_input_name(
- _WfNames.initial_mesh, mesh_provider_cyc, 100
- ) # hack
- mesh_input = None
+ skin_operator_input_mesh_fwd_op.inputs.any(mesh_provider_cyc)
+
+ mesh_provider_cyc.connect(100, initial_mesh_fwd_op.outputs.any)
elif elements is not None:
if not isinstance(elements, Scoping):
@@ -453,17 +475,19 @@ def select_skin(
scoping=elements, server=self._server
)
self._selection.add_operator(mesh_by_scop_op)
- mesh_input = mesh_by_scop_op.inputs.mesh
- op.inputs.mesh.connect(mesh_by_scop_op)
+ skin_operator_input_mesh_fwd_op.inputs.any(mesh_by_scop_op.outputs.mesh)
+ _connect_any(mesh_by_scop_op.inputs.mesh, initial_mesh_fwd_op.outputs.any)
- if mesh_input is not None:
- self._selection.set_input_name(_WfNames.initial_mesh, mesh_input)
+ if not _is_model_cyclic(is_model_cyclic):
if location == result_native_location:
- self._selection.set_output_name(_WfNames.mesh, op.outputs.mesh)
- self._selection.set_output_name(_WfNames.skin, op.outputs.mesh)
+ self._selection.set_output_name(
+ _WfNames.mesh, skin_operator.outputs.mesh
+ )
+
+ self._selection.set_output_name(_WfNames.skin, skin_operator.outputs.mesh)
if location == locations.nodal and result_native_location == locations.nodal:
self._selection.set_output_name(
- _WfNames.scoping, op.outputs.nodes_mesh_scoping
+ _WfNames.scoping, skin_operator.outputs.nodes_mesh_scoping
)
elif not _is_model_cyclic(is_model_cyclic) and (
@@ -471,16 +495,31 @@ def select_skin(
or result_native_location == locations.elemental_nodal
):
transpose_op = operators.scoping.transpose(
- mesh_scoping=op.outputs.nodes_mesh_scoping, server=self._server
+ mesh_scoping=skin_operator.outputs.nodes_mesh_scoping,
+ server=self._server,
)
self._selection.add_operator(transpose_op)
- self._selection.set_input_name(
- _WfNames.initial_mesh, transpose_op.inputs.meshed_region
+ _connect_any(
+ transpose_op.inputs.meshed_region, initial_mesh_fwd_op.outputs.any
)
+
self._selection.set_output_name(
_WfNames.scoping, transpose_op.outputs.mesh_scoping_as_scoping
)
+ _connect_any(
+ skin_operator.inputs.mesh, skin_operator_input_mesh_fwd_op.outputs.any
+ )
+
+ # Provide the input mesh from which a skin was generated
+ # This is useful because the skin_mesh contains the mapping of
+ # skin elements to the original mesh element indices, which is used
+ # by the solid_to_skin_fc operator. The skin_input_mesh can be passed
+ # to the solid_to_skin_fc operator to ensure that the mapping is correct.
+ self._selection.set_output_name(
+ _WfNames.skin_input_mesh, skin_operator_input_mesh_fwd_op.outputs.any
+ )
+
def select_with_scoping(self, scoping: Scoping):
"""Directly sets the scoping as the spatial selection.
@@ -549,6 +588,37 @@ def select_nodes_of_elements(
_WfNames.scoping, op.outputs.mesh_scoping_as_scoping
)
+ def select_elements_of_nodes(
+ self, nodes: Union[List[int], Scoping], mesh: Mesh, inclusive: bool = True
+ ) -> None:
+ """Select all elements of nodes using the nodes' IDs or a nodal mesh scoping.
+
+ Parameters
+ ----------
+ nodes:
+ Node IDs or a nodal mesh scoping.
+ mesh:
+ Mesh containing the necessary connectivity.
+ inclusive:
+ If True, include all elements that touch a node. If False, include only elements
+ that share all the nodes in the scoping.
+ """
+ if isinstance(nodes, Scoping):
+ scoping = nodes
+ else:
+ scoping = Scoping(location=locations.nodal, ids=nodes, server=self._server)
+
+ op = operators.scoping.transpose(
+ mesh_scoping=scoping,
+ meshed_region=mesh._meshed_region,
+ inclusive=1 if inclusive else 0,
+ requested_location=locations.elemental,
+ )
+ self._selection.add_operator(op)
+ self._selection.set_output_name(
+ _WfNames.scoping, op.outputs.mesh_scoping_as_scoping
+ )
+
def select_nodes_of_faces(
self,
faces: Union[List[int], Scoping],
@@ -732,28 +802,6 @@ def outputs_mesh(self) -> bool:
"""Whether the selection workflow as an output named ``mesh``."""
return _WfNames.mesh in self._selection.output_names
- def requires_manual_averaging(
- self,
- location: Union[str, locations],
- result_native_location: Union[str, locations],
- is_model_cyclic: str = "not_cyclic",
- ) -> bool:
- """Whether the selection workflow requires to manually build the averaging workflow."""
- output_names = self._selection.output_names
- is_model_cyclic = is_model_cyclic in ["single_stage", "multi_stage"]
- if (
- _WfNames.external_layer in output_names
- and is_model_cyclic
- and location != result_native_location
- ):
- return True
- elif _WfNames.skin in output_names and (
- result_native_location == locations.elemental
- or result_native_location == locations.elemental_nodal
- ):
- return True
- return False
-
class Selection:
"""The ``Selection`` class helps define the domain on which results are evaluated.
@@ -842,6 +890,7 @@ def select_named_selection(
self,
named_selection: Union[str, List[str]],
location: Union[str, locations, None] = None,
+ inclusive: bool = False,
) -> None:
"""Select a mesh scoping corresponding to one or several named selections.
@@ -853,8 +902,14 @@ def select_named_selection(
Location of the mesh entities to extract results at. Available locations are listed in
class:`post.locations` and are: `post.locations.nodal` or
`post.locations.elemental`.
+ inclusive:
+ If True and the named selection is nodal,
+ include all elements that touch a node. If False, include only elements
+ that share all the nodes in the scoping.
"""
- self._spatial_selection.select_named_selection(named_selection, location)
+ self._spatial_selection.select_named_selection(
+ named_selection, location, inclusive
+ )
def select_nodes(self, nodes: Union[List[int], Scoping]) -> None:
"""Select a mesh scoping with its node IDs.
@@ -908,6 +963,25 @@ def select_nodes_of_elements(
"""
self._spatial_selection.select_nodes_of_elements(elements, mesh)
+ def select_elements_of_nodes(
+ self, nodes: Union[List[int], Scoping], mesh: Mesh, inclusive: bool = True
+ ) -> None:
+ """Select elements belonging to nodes defined by their IDs.
+
+ Select an elemental mesh scoping corresponding to nodes.
+
+ Parameters
+ ----------
+ nodes:
+ Node IDs.
+ mesh:
+ Mesh containing the connectivity.
+ inclusive:
+ If True, include all elements that touch a node. If False, include only elements
+ that share all the nodes in the scoping.
+ """
+ self._spatial_selection.select_elements_of_nodes(nodes, mesh, inclusive)
+
def select_nodes_of_faces(
self, faces: Union[List[int], Scoping], mesh: Mesh
) -> None:
@@ -1026,16 +1100,3 @@ def requires_mesh(self) -> bool:
def outputs_mesh(self) -> bool:
"""Whether the selection workflow as an output named ``mesh``."""
return self._spatial_selection.outputs_mesh
-
- def requires_manual_averaging(
- self,
- location: Union[str, locations],
- result_native_location: Union[str, locations],
- is_model_cyclic: str = "not_cyclic",
- ) -> bool:
- """Whether the selection workflow requires to manually build the averaging workflow."""
- return self._spatial_selection.requires_manual_averaging(
- location=location,
- result_native_location=result_native_location,
- is_model_cyclic=is_model_cyclic,
- )
diff --git a/src/ansys/dpf/post/simulation.py b/src/ansys/dpf/post/simulation.py
index 12614410f..cb42b8654 100644
--- a/src/ansys/dpf/post/simulation.py
+++ b/src/ansys/dpf/post/simulation.py
@@ -5,14 +5,13 @@
"""
from abc import ABC
-from enum import Enum
from os import PathLike
import re
-from typing import Dict, List, Tuple, Union
+from typing import Dict, List, Optional, Tuple, Union
import warnings
import ansys.dpf.core as dpf
-from ansys.dpf.core import DataSources, Model, TimeFreqSupport, Workflow, errors
+from ansys.dpf.core import DataSources, Model, TimeFreqSupport, errors
from ansys.dpf.core.available_result import _result_properties
from ansys.dpf.core.common import elemental_properties
from ansys.dpf.core.plotter import DpfPlotter
@@ -31,48 +30,15 @@
)
from ansys.dpf.post.mesh import Mesh
from ansys.dpf.post.meshes import Meshes
-from ansys.dpf.post.selection import Selection, _WfNames
-
-component_label_to_index = {
- "1": 0,
- "2": 1,
- "3": 2,
- "4": 3,
- "5": 4,
- "6": 5,
- "X": 0,
- "Y": 1,
- "Z": 2,
- "XX": 0,
- "YY": 1,
- "ZZ": 2,
- "XY": 3,
- "YZ": 4,
- "XZ": 5,
-}
-
-vector_component_names = ["X", "Y", "Z"]
-matrix_component_names = ["XX", "YY", "ZZ", "XY", "YZ", "XZ"]
-principal_names = ["1", "2", "3"]
-
-
-class ResultCategory(Enum):
- """Enum for available result categories."""
-
- scalar = 1
- vector = 2
- matrix = 3
- principal = 4
- equivalent = 5
+from ansys.dpf.post.result_workflows._build_workflow import _requires_manual_averaging
+from ansys.dpf.post.result_workflows._component_helper import ResultCategory
+from ansys.dpf.post.result_workflows._utils import _Rescoping
+from ansys.dpf.post.selection import Selection
class Simulation(ABC):
"""Base class of all PyDPF-Post simulation types."""
- _vector_component_names = vector_component_names
- _matrix_component_names = matrix_component_names
- _principal_names = principal_names
-
def __init__(self, data_sources: DataSources, model: Model):
"""Initialize the simulation using a ``dpf.core.Model`` object."""
self._model = model
@@ -358,7 +324,7 @@ def split_mesh_by_properties(
List[elemental_properties],
Dict[elemental_properties, Union[int, List[int]]],
],
- ) -> Meshes:
+ ) -> Union[Mesh, Meshes, None]:
"""Splits the simulation Mesh according to properties and returns it as Meshes.
Parameters
@@ -434,87 +400,6 @@ def __str__(self):
txt += self._model.__str__()
return txt
- def _build_components_for_vector(self, base_name, components):
- out, columns = self._build_components(
- base_name, components, self._vector_component_names
- )
- return out, columns
-
- def _build_components_for_matrix(self, base_name, components):
- out, columns = self._build_components(
- base_name, components, self._matrix_component_names
- )
- return out, columns
-
- def _build_components(self, base_name, components, component_names):
- # Create operator internal names based on components
- out = []
- if components is None:
- out = None
- else:
- if isinstance(components, int) or isinstance(components, str):
- components = [components]
- if not isinstance(components, list):
- raise ValueError(
- "Argument 'components' must be an int, a str, or a list of either."
- )
- for comp in components:
- if not (isinstance(comp, str) or isinstance(comp, int)):
- raise ValueError(
- "Argument 'components' can only contain integers and/or strings.\n"
- f"The provided component '{comp}' is not valid."
- )
- if isinstance(comp, int):
- comp = str(comp)
- if comp not in component_label_to_index.keys():
- raise ValueError(
- f"Component {comp} is not valid. Please use one of: "
- f"{list(component_label_to_index.keys())}."
- )
- out.append(component_label_to_index[comp])
-
- # Take unique values and build names list
- if out is None:
- columns = [base_name + comp for comp in component_names]
- else:
- out = list(set(out))
- columns = [base_name + component_names[i] for i in out]
- return out, columns
-
- def _build_components_for_principal(self, base_name, components):
- # Create operator internal names based on principal components
- out = []
- if components is None:
- components = [1]
-
- if isinstance(components, int) or isinstance(components, str):
- components = [components]
- if not isinstance(components, list):
- raise ValueError(
- "Argument 'components' must be an int, a str, or a list of either."
- )
- for comp in components:
- if not (isinstance(comp, str) or isinstance(comp, int)):
- raise ValueError(
- "Argument 'components' can only contain integers and/or strings."
- )
- if str(comp) not in self._principal_names:
- raise ValueError(
- "A principal component ID must be one of: "
- f"{self._principal_names}."
- )
- out.append(int(comp) - 1)
-
- # Take unique values
- if out is not None:
- out = list(set(out))
- # Build columns names
- if out is None:
- columns = [base_name + str(comp) for comp in self._principal_names]
- else:
- columns = [base_name + self._principal_names[i] for i in out]
- return out, columns
-
def _build_result_operator(
self,
name: str,
@@ -529,71 +414,6 @@ def _build_result_operator(
op.connect(9, location)
return op
- def _build_result_workflow(
- self,
- name: str,
- location: Union[locations, str],
- force_elemental_nodal: bool,
- ) -> (dpf.Workflow, dpf.Operator):
- op = self._model.operator(name=name)
- op.connect(7, self.mesh._meshed_region)
- if force_elemental_nodal:
- op.connect(9, "ElementalNodal")
- elif location:
- op.connect(9, location)
- wf = Workflow(server=self._model._server)
- wf.add_operator(op)
- wf.set_input_name(_WfNames.read_cyclic, op, 14)
- wf.set_input_name(_WfNames.cyclic_sectors_to_expand, op, 18)
- wf.set_input_name(_WfNames.cyclic_phase, op, 19)
- wf.set_output_name(_WfNames.result, op, 0)
- return wf, op
-
- def _append_norm(self, wf, out, base_name):
- """Append a norm operator to the current result workflow."""
- norm_op = self._model.operator(name="norm_fc")
- norm_op.connect(0, out)
- wf.add_operator(operator=norm_op)
- base_name += "_N"
- out = norm_op.outputs.fields_container
- comp = None
- return wf, out, comp, base_name
-
- def _create_components(self, base_name, category, components):
- comp = None
- # Build the list of requested results
- if category in [ResultCategory.scalar, ResultCategory.equivalent]:
- # A scalar or equivalent result has no components
- to_extract = None
- columns = [base_name]
- elif category == ResultCategory.vector:
- # A vector result can have components selected
- to_extract, columns = self._build_components_for_vector(
- base_name=base_name, components=components
- )
- if to_extract is not None:
- comp = [self._vector_component_names[i] for i in to_extract]
- else:
- comp = self._vector_component_names
- elif category == ResultCategory.matrix:
- # A vector result can have components selected
- to_extract, columns = self._build_components_for_matrix(
- base_name=base_name, components=components
- )
- if to_extract is not None:
- comp = [self._matrix_component_names[i] for i in to_extract]
- else:
- comp = self._matrix_component_names
- elif category == ResultCategory.principal:
- # A principal type of result can have components selected
- to_extract, columns = self._build_components_for_principal(
- base_name=base_name, components=components
- )
- comp = [self._principal_names[i] for i in to_extract]
- else:
- raise ValueError(f"'{category}' is not a valid category value.")
- return comp, to_extract, columns
-
def _generate_disp_workflow(self, fc, selection) -> Union[dpf.Workflow, None]:
# Check displacement is an available result
if not any(
@@ -684,14 +504,17 @@ def _create_dataframe(
values = fc.get_available_ids_for_label(label)
# Then try to gather the correspond string values for display
try:
- label_support = self.result_info.qualifier_label_support(label)
- names_field = label_support.string_field_support_by_property(
- "names"
- )
- values = [
- names_field.get_entity_data_by_id(value)[0] + f" ({value})"
- for value in values
- ]
+ if label == "mat":
+ values = fc.get_available_ids_for_label("mat")
+ else:
+ label_support = self.result_info.qualifier_label_support(label)
+ names_field = label_support.string_field_support_by_property(
+ "names"
+ )
+ values = [
+ names_field.get_entity_data_by_id(value)[0] + f" ({value})"
+ for value in values
+ ]
except (
ValueError,
errors.DPFServerException,
@@ -720,78 +543,6 @@ def _create_dataframe(
# Return the result wrapped in a DPF_Dataframe
return df
- @staticmethod
- def _treat_cyclic(expand_cyclic, phase_angle_cyclic, result_wf):
- if expand_cyclic is not False:
- # If expand_cyclic is a list
- if isinstance(expand_cyclic, list) and len(expand_cyclic) > 0:
- # If a list of sector numbers, directly connect it to the num_sectors pin
- if all(
- [
- isinstance(expand_cyclic_i, int)
- for expand_cyclic_i in expand_cyclic
- ]
- ):
- if any([i < 1 for i in expand_cyclic]):
- raise ValueError(
- "Sector selection with 'expand_cyclic' starts at 1."
- )
- result_wf.connect(
- _WfNames.cyclic_sectors_to_expand,
- [i - 1 for i in expand_cyclic],
- )
- # If any is a list, treat it as per stage num_sectors
- elif any(
- [
- isinstance(expand_cyclic_i, list)
- for expand_cyclic_i in expand_cyclic
- ]
- ):
- # Create a ScopingsContainer to fill
- sectors_scopings = dpf.ScopingsContainer()
- sectors_scopings.labels = ["stage"]
- # For each potential num_sectors, check either an int or a list of ints
- for i, num_sectors_stage_i in enumerate(expand_cyclic):
- # Prepare num_sectors data
- if isinstance(num_sectors_stage_i, int):
- num_sectors_stage_i = [num_sectors_stage_i]
- elif isinstance(num_sectors_stage_i, list):
- if not all(
- [isinstance(n, int) for n in num_sectors_stage_i]
- ):
- raise ValueError(
- "'expand_cyclic' only accepts lists of int values >= 1."
- )
- # num_sectors_stage_i is now a list of int,
- # add an equivalent Scoping with the correct 'stage' label value
- if any([i < 1 for i in num_sectors_stage_i]):
- raise ValueError(
- "Sector selection with 'expand_cyclic' starts at 1."
- )
- sectors_scopings.add_scoping(
- {"stage": i},
- dpf.Scoping(ids=[i - 1 for i in num_sectors_stage_i]),
- )
- result_wf.connect(
- _WfNames.cyclic_sectors_to_expand, inpt=sectors_scopings
- )
- elif not isinstance(expand_cyclic, bool):
- raise ValueError(
- "'expand_cyclic' argument can only be a boolean or a list."
- )
- result_wf.connect(_WfNames.read_cyclic, 3) # Connect the read_cyclic pin
- else:
- result_wf.connect(_WfNames.read_cyclic, 1) # Connect the read_cyclic pin
- if phase_angle_cyclic is not None:
- if isinstance(phase_angle_cyclic, int):
- phase_angle_cyclic = float(phase_angle_cyclic)
- if not isinstance(phase_angle_cyclic, float):
- raise ValueError(
- "'phase_angle_cyclic' argument only accepts a single float value."
- )
- result_wf.connect(_WfNames.cyclic_phase, phase_angle_cyclic)
- return result_wf
-
class MechanicalSimulation(Simulation, ABC):
"""Base class for mechanical type simulations.
@@ -827,7 +578,8 @@ def _build_selection(
external_layer: bool = False,
skin: Union[bool, List[int]] = False,
expand_cyclic: Union[bool, List[Union[int, List[int]]]] = True,
- ) -> Selection:
+ average_per_body: Optional[bool] = False,
+ ) -> Tuple[Selection, Optional[_Rescoping]]:
tot = (
(node_ids is not None)
+ (element_ids is not None)
@@ -847,9 +599,38 @@ def _build_selection(
"Arguments selection, skin, and external_layer are mutually exclusive"
)
if selection is not None:
- return selection
+ return selection, None
else:
selection = Selection(server=self._model._server)
+
+ if isinstance(skin, bool):
+ has_skin = skin
+ else:
+ has_skin = len(skin) > 0
+
+ requires_manual_averaging = _requires_manual_averaging(
+ base_name=base_name,
+ location=location,
+ category=category,
+ has_skin=has_skin,
+ has_external_layer=external_layer,
+ create_operator_callable=self._model.operator,
+ average_per_body=average_per_body,
+ )
+
+ rescoping = None
+ if requires_manual_averaging:
+ if node_ids is not None and location == locations.nodal:
+ rescoping = _Rescoping(requested_location=location, node_ids=node_ids)
+
+ if named_selections:
+ rescoping = _Rescoping(
+ requested_location=location, named_selections=named_selections
+ )
+
+ if requires_manual_averaging and location != locations.elemental_nodal:
+ location = locations.elemental_nodal
+
# Create the SpatialSelection
# First: the skin and the external layer to be able to have both a mesh scoping and
@@ -862,11 +643,7 @@ def _build_selection(
if base_name in _result_properties
else None
)
- location = (
- locations.elemental_nodal
- if self._requires_manual_averaging(base_name, location, category, None)
- else location
- )
+
if external_layer not in [None, False]:
selection.select_external_layer(
elements=external_layer if external_layer is not True else None,
@@ -891,7 +668,9 @@ def _build_selection(
)
if named_selections:
selection.select_named_selection(
- named_selection=named_selections, location=location
+ named_selection=named_selections,
+ location=location,
+ inclusive=requires_manual_averaging,
)
elif element_ids is not None:
if location == locations.nodal:
@@ -900,11 +679,11 @@ def _build_selection(
selection.select_elements(elements=element_ids)
elif node_ids is not None:
if location != locations.nodal:
- raise ValueError(
- "Argument 'node_ids' can only be used if 'location' "
- "is equal to 'post.locations.nodal'."
+ selection.select_elements_of_nodes(
+ nodes=node_ids, mesh=self.mesh, inclusive=requires_manual_averaging
)
- selection.select_nodes(nodes=node_ids)
+ else:
+ selection.select_nodes(nodes=node_ids)
# Create the TimeFreqSelection
if all_sets:
@@ -971,70 +750,11 @@ def _build_selection(
if isinstance(load_steps, int):
load_steps = [load_steps]
selection.time_freq_selection.select_load_steps(load_steps=load_steps)
- return selection
+ return selection, rescoping
else:
# Otherwise, no argument was given, create a time_freq_scoping of the last set only
selection.select_time_freq_sets(
time_freq_sets=[self.time_freq_support.n_sets]
)
- return selection
-
- def _requires_manual_averaging(
- self,
- base_name: str,
- location: str,
- category: ResultCategory,
- selection: Selection,
- ):
- res = _result_properties[base_name] if base_name in _result_properties else None
- if category == ResultCategory.equivalent and base_name[0] == "E": # strain eqv
- return True
- if res is not None and selection is not None:
- return selection.requires_manual_averaging(
- location=location,
- result_native_location=res["location"],
- is_model_cyclic=self._model.operator("is_cyclic").eval(),
- )
- return False
-
- def _create_averaging_operator(
- self,
- location: str,
- selection: Selection,
- ):
- average_op = None
- first_average_op = None
- forward = None
- if _WfNames.skin in selection.spatial_selection._selection.output_names:
- if self._model._server.meet_version("6.2"):
- first_average_op = self._model.operator(name="solid_to_skin_fc")
- forward = first_average_op
- else:
- first_average_op = self._model.operator(name="solid_to_skin")
- forward = self._model.operator(name="forward_fc")
- forward.connect(0, first_average_op, 0)
- average_wf = dpf.Workflow(server=self._model._server)
- if hasattr(first_average_op.inputs, "mesh_scoping"):
- inpt = (
- first_average_op.inputs.mesh_scoping
- ) # To keep for retro-compatibility
- else:
- inpt = first_average_op.inputs.mesh
- average_wf.set_input_name(_WfNames.skin, inpt)
- average_wf.connect_with(
- selection.spatial_selection._selection,
- output_input_names={_WfNames.skin: _WfNames.skin},
- )
-
- if location == locations.nodal:
- average_op = self._model.operator(name="to_nodal_fc")
- elif location == locations.elemental:
- average_op = self._model.operator(name="to_elemental_fc")
- if average_op and forward:
- average_op.connect(0, forward, 0)
- else:
- first_average_op = average_op
-
- if first_average_op is not None and average_op is not None:
- return (first_average_op, average_op)
+ return selection, rescoping
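A minimal sketch of the new calling convention (names come from the diff above; the id values are hypothetical and 'simulation' stands for any MechanicalSimulation instance):

    selection, rescoping = simulation._build_selection(
        base_name="S",
        category=ResultCategory.matrix,
        selection=None,
        node_ids=[42, 43],
        location=locations.nodal,
        average_per_body=False,
    )
    # 'rescoping' stays None unless manual averaging forces an elemental-nodal
    # evaluation first; it then records the requested location plus node ids (or
    # named selections) so _get_result can rescope the averaged data afterwards.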
diff --git a/src/ansys/dpf/post/static_mechanical_simulation.py b/src/ansys/dpf/post/static_mechanical_simulation.py
index 3f9d5c8b2..0ceb8b979 100644
--- a/src/ansys/dpf/post/static_mechanical_simulation.py
+++ b/src/ansys/dpf/post/static_mechanical_simulation.py
@@ -4,13 +4,26 @@
--------------------------
"""
-from typing import List, Tuple, Union
+from typing import List, Optional, Tuple, Union
from ansys.dpf import core
from ansys.dpf.post import locations
from ansys.dpf.post.dataframe import DataFrame
+from ansys.dpf.post.result_workflows._build_workflow import (
+ _create_result_workflow_inputs,
+ _create_result_workflows,
+)
+from ansys.dpf.post.result_workflows._component_helper import (
+ ResultCategory,
+ _create_components,
+)
+from ansys.dpf.post.result_workflows._connect_workflow_inputs import (
+ _connect_averaging_eqv_and_principal_workflows,
+ _connect_workflow_inputs,
+)
+from ansys.dpf.post.result_workflows._utils import AveragingConfig, _append_workflows
from ansys.dpf.post.selection import Selection, _WfNames
-from ansys.dpf.post.simulation import MechanicalSimulation, ResultCategory
+from ansys.dpf.post.simulation import MechanicalSimulation, _Rescoping
class StaticMechanicalSimulation(MechanicalSimulation):
@@ -26,145 +39,59 @@ def _get_result_workflow(
selection: Union[Selection, None] = None,
expand_cyclic: Union[bool, List[Union[int, List[int]]]] = True,
phase_angle_cyclic: Union[float, None] = None,
+ averaging_config: AveragingConfig = AveragingConfig(),
+ rescoping: Optional[_Rescoping] = None,
) -> (core.Workflow, Union[str, list[str], None], str):
"""Generate (without evaluating) the Workflow to extract results."""
- comp, to_extract, _ = self._create_components(base_name, category, components)
-
- force_elemental_nodal = self._requires_manual_averaging(
+ result_workflow_inputs = _create_result_workflow_inputs(
base_name=base_name,
- location=location,
category=category,
+ components=components,
+ norm=norm,
+ location=location,
selection=selection,
+ create_operator_callable=self._model.operator,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
-
- # Initialize a workflow
- wf, result_op = self._build_result_workflow(
- name=base_name,
+ result_workflows = _create_result_workflows(
+ server=self._model._server,
+ create_operator_callable=self._model.operator,
+ create_workflow_inputs=result_workflow_inputs,
+ )
+ _connect_workflow_inputs(
+ initial_result_workflow=result_workflows.initial_result_workflow,
+ split_by_body_workflow=result_workflows.split_by_bodies_workflow,
+ rescoping_workflow=result_workflows.rescoping_workflow,
+ selection=selection,
+ data_sources=self._model.metadata.data_sources,
+ streams_provider=self._model.metadata.streams_provider,
+ expand_cyclic=expand_cyclic,
+ phase_angle_cyclic=phase_angle_cyclic,
+ mesh=self.mesh._meshed_region,
location=location,
- force_elemental_nodal=force_elemental_nodal,
+ force_elemental_nodal=result_workflows.force_elemental_nodal,
+ averaging_config=averaging_config,
)
- # Its output is selected as future workflow output for now
- out = result_op.outputs.fields_container
- # Its inputs are selected as workflow inputs for merging with selection workflows
- wf.set_input_name("time_scoping", result_op.inputs.time_scoping)
- wf.set_input_name("mesh_scoping", result_op.inputs.mesh_scoping)
-
- wf.connect_with(
- selection.time_freq_selection._selection,
- output_input_names=("scoping", "time_scoping"),
- )
- if selection.requires_mesh:
- mesh_wf = core.Workflow(server=self._model._server)
- mesh_wf.add_operator(self._model.metadata.mesh_provider)
- mesh_wf.set_output_name(
- _WfNames.initial_mesh, self._model.metadata.mesh_provider
- )
- selection.spatial_selection._selection.connect_with(
- mesh_wf,
- output_input_names={_WfNames.initial_mesh: _WfNames.initial_mesh},
- )
+ output_wf = _connect_averaging_eqv_and_principal_workflows(result_workflows)
- wf.connect_with(
- selection.spatial_selection._selection,
- output_input_names={"scoping": "mesh_scoping"},
+ output_wf = _append_workflows(
+ [
+ result_workflows.component_extraction_workflow,
+ result_workflows.norm_workflow,
+ result_workflows.rescoping_workflow,
+ ],
+ output_wf,
)
- # Treat cyclic cases
- wf = self._treat_cyclic(expand_cyclic, phase_angle_cyclic, wf)
+ output_wf.progress_bar = False
- # Connect data_sources and streams_container inputs of selection if necessary
- if "streams" in wf.input_names:
- wf.connect("streams", self._model.metadata.streams_provider)
- if "data_sources" in wf.input_names:
- wf.connect("data_sources", self._model.metadata.data_sources)
-
- average_op = None
- if force_elemental_nodal:
- average_op = self._create_averaging_operator(
- location=location, selection=selection
- )
-
- # Add a step to compute principal invariants if result is principal
- if category == ResultCategory.principal:
- # Instantiate the required operator
- principal_op = self._model.operator(name="invariants_fc")
- # Corresponds to scripting name principal_invariants
- if average_op is not None:
- average_op[0].connect(0, out)
- principal_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- average_op = None
- else:
- principal_op.connect(0, out)
- wf.add_operator(operator=principal_op)
- # Set as future output of the workflow
- if len(to_extract) == 1:
- out = getattr(principal_op.outputs, f"fields_eig_{to_extract[0]+1}")
- else:
- raise NotImplementedError("Cannot combine principal results yet.")
- # We need to define the behavior for storing different results in a DataFrame
-
- # Add a step to compute equivalent if result is equivalent
- elif category == ResultCategory.equivalent:
- equivalent_op = self._model.operator(name="eqv_fc")
- wf.add_operator(operator=equivalent_op)
- # If a strain result, change the location now
- if (
- average_op is not None
- and category == ResultCategory.equivalent
- and base_name[0] == "E"
- ):
- equivalent_op.connect(0, out)
- average_op[0].connect(0, equivalent_op)
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = average_op[1].outputs.fields_container
- elif average_op is not None:
- average_op[0].connect(0, out)
- equivalent_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = equivalent_op.outputs.fields_container
- else:
- equivalent_op.connect(0, out)
- out = equivalent_op.outputs.fields_container
-
- average_op = None
- base_name += "_VM"
-
- if average_op is not None:
- average_op[0].connect(0, out)
- wf.add_operators(list(average_op))
- out = average_op[1].outputs.fields_container
-
- # Add an optional component selection step if result is vector, matrix, or principal
- if (category in [ResultCategory.vector, ResultCategory.matrix]) and (
- to_extract is not None
- ):
- # Instantiate a component selector operator
- extract_op = self._model.operator(name="component_selector_fc")
- # Feed it the current workflow output
- extract_op.connect(0, out)
- # Feed it the requested components
- extract_op.connect(1, to_extract)
- wf.add_operator(operator=extract_op)
- # Set as future output of the workflow
- out = extract_op.outputs.fields_container
- if len(to_extract) == 1:
- base_name += f"_{comp[0]}"
- comp = None
-
- # Add an optional norm operation if requested
- if norm:
- wf, out, comp, base_name = self._append_norm(wf, out, base_name)
-
- # Set the workflow output
- wf.set_output_name("out", out)
- wf.progress_bar = False
-
- return wf, comp, base_name
+ return (
+ output_wf,
+ result_workflows.components,
+ result_workflows.base_name,
+ )
def _get_result(
self,
@@ -187,6 +114,7 @@ def _get_result(
phase_angle_cyclic: Union[float, None] = None,
external_layer: Union[bool, List[int]] = False,
skin: Union[bool, List[int]] = False,
+ averaging_config: AveragingConfig = AveragingConfig(),
) -> DataFrame:
"""Extract results from the simulation.
@@ -254,6 +182,10 @@ def _get_result(
is computed over list of elements (not supported for cyclic symmetry). Getting the
skin on more than one result (several time freq sets, split data...) is only
supported starting with Ansys 2023R2.
+ averaging_config:
+ By default, averaging happens across all bodies. The averaging config
+ can specify that averaging happens per body instead, and defines the
+ properties used to identify a body.
Returns
-------
@@ -274,7 +206,7 @@ def _get_result(
"and load_steps are mutually exclusive."
)
- selection = self._build_selection(
+ selection, rescoping = self._build_selection(
base_name=base_name,
category=category,
selection=selection,
@@ -288,6 +220,7 @@ def _get_result(
location=location,
external_layer=external_layer,
skin=skin,
+ average_per_body=averaging_config.average_per_body,
)
wf, comp, base_name = self._get_result_workflow(
@@ -299,10 +232,12 @@ def _get_result(
selection=selection,
expand_cyclic=expand_cyclic,
phase_angle_cyclic=phase_angle_cyclic,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
# Evaluate the workflow
- fc = wf.get_output("out", core.types.fields_container)
+ fc = wf.get_output(_WfNames.output_data, core.types.fields_container)
disp_wf = self._generate_disp_workflow(fc, selection)
submesh = None
@@ -312,7 +247,7 @@ def _get_result(
_WfNames.mesh, core.types.meshed_region
)
- _, _, columns = self._create_components(base_name, category, components)
+ _, _, columns = _create_components(base_name, category, components)
return self._create_dataframe(
fc, location, columns, comp, base_name, disp_wf, submesh
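A usage sketch for the new 'averaging_config' argument, assuming the public result methods forward it to _get_result and that AveragingConfig accepts average_per_body as a constructor argument (the result file name is hypothetical):

    from ansys.dpf import post
    from ansys.dpf.post.result_workflows._utils import AveragingConfig

    simulation = post.StaticMechanicalSimulation("multi_body_file.rst")
    # Average within each body instead of across body interfaces:
    df = simulation.stress_nodal(
        set_ids=[1],
        averaging_config=AveragingConfig(average_per_body=True),
    )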
diff --git a/src/ansys/dpf/post/transient_mechanical_simulation.py b/src/ansys/dpf/post/transient_mechanical_simulation.py
index baba2e7c2..38d685173 100644
--- a/src/ansys/dpf/post/transient_mechanical_simulation.py
+++ b/src/ansys/dpf/post/transient_mechanical_simulation.py
@@ -4,13 +4,30 @@
-----------------------------
"""
-from typing import List, Tuple, Union
+from typing import List, Optional, Tuple, Union
from ansys.dpf import core as dpf
from ansys.dpf.post import locations
from ansys.dpf.post.dataframe import DataFrame
+from ansys.dpf.post.result_workflows._build_workflow import (
+ _create_result_workflow_inputs,
+ _create_result_workflows,
+)
+from ansys.dpf.post.result_workflows._component_helper import (
+ ResultCategory,
+ _create_components,
+)
+from ansys.dpf.post.result_workflows._connect_workflow_inputs import (
+ _connect_averaging_eqv_and_principal_workflows,
+ _connect_workflow_inputs,
+)
+from ansys.dpf.post.result_workflows._utils import (
+ AveragingConfig,
+ _append_workflows,
+ _Rescoping,
+)
from ansys.dpf.post.selection import Selection, _WfNames
-from ansys.dpf.post.simulation import MechanicalSimulation, ResultCategory
+from ansys.dpf.post.simulation import MechanicalSimulation
class TransientMechanicalSimulation(MechanicalSimulation):
@@ -24,146 +41,59 @@ def _get_result_workflow(
components: Union[str, List[str], int, List[int], None] = None,
norm: bool = False,
selection: Union[Selection, None] = None,
+ averaging_config: AveragingConfig = AveragingConfig(),
+ rescoping: Optional[_Rescoping] = None,
) -> (dpf.Workflow, Union[str, list[str], None], str):
"""Generate (without evaluating) the Workflow to extract results."""
- comp, to_extract, _ = self._create_components(base_name, category, components)
-
- force_elemental_nodal = self._requires_manual_averaging(
+ result_workflow_inputs = _create_result_workflow_inputs(
base_name=base_name,
- location=location,
category=category,
- selection=selection,
- )
-
- # Instantiate the main result operator
- wf, result_op = self._build_result_workflow(
- name=base_name,
+ components=components,
+ norm=norm,
location=location,
- force_elemental_nodal=force_elemental_nodal,
+ selection=selection,
+ create_operator_callable=self._model.operator,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
- # Its output is selected as future workflow output for now
- out = result_op.outputs.fields_container
- # Its inputs are selected as workflow inputs for merging with selection workflows
- wf.set_input_name("time_scoping", result_op.inputs.time_scoping)
- wf.set_input_name("mesh_scoping", result_op.inputs.mesh_scoping)
-
- wf.connect_with(
- selection.time_freq_selection._selection,
- output_input_names=("scoping", "time_scoping"),
+ result_workflows = _create_result_workflows(
+ server=self._model._server,
+ create_operator_callable=self._model.operator,
+ create_workflow_inputs=result_workflow_inputs,
)
- if selection.requires_mesh:
- # wf.set_input_name(_WfNames.mesh, result_op.inputs.mesh)
- mesh_wf = dpf.Workflow(server=self._model._server)
- mesh_wf.add_operator(self._model.metadata.mesh_provider)
- mesh_wf.set_output_name(
- _WfNames.initial_mesh, self._model.metadata.mesh_provider
- )
- selection.spatial_selection._selection.connect_with(
- mesh_wf,
- output_input_names={_WfNames.initial_mesh: _WfNames.initial_mesh},
- )
- wf.connect_with(
- selection.spatial_selection._selection,
- output_input_names={
- "scoping": "mesh_scoping",
- },
+ _connect_workflow_inputs(
+ initial_result_workflow=result_workflows.initial_result_workflow,
+ split_by_body_workflow=result_workflows.split_by_bodies_workflow,
+ rescoping_workflow=result_workflows.rescoping_workflow,
+ selection=selection,
+ data_sources=self._model.metadata.data_sources,
+ streams_provider=self._model.metadata.streams_provider,
+ expand_cyclic=False,
+ phase_angle_cyclic=None,
+ mesh=self.mesh._meshed_region,
+ location=location,
+ force_elemental_nodal=result_workflows.force_elemental_nodal,
+ averaging_config=averaging_config,
)
- # Connect data_sources and streams_container inputs of selection if necessary
- if "streams" in wf.input_names:
- wf.connect("streams", self._model.metadata.streams_provider)
- if "data_sources" in wf.input_names:
- wf.connect("data_sources", self._model.metadata.data_sources)
+ output_wf = _connect_averaging_eqv_and_principal_workflows(result_workflows)
- average_op = None
- if force_elemental_nodal:
- average_op = self._create_averaging_operator(
- location=location, selection=selection
- )
+ output_wf = _append_workflows(
+ [
+ result_workflows.component_extraction_workflow,
+ result_workflows.norm_workflow,
+ result_workflows.rescoping_workflow,
+ ],
+ output_wf,
+ )
- # Add a step to compute principal invariants if result is principal
- if category == ResultCategory.principal:
- # Instantiate the required operator
- principal_op = self._model.operator(name="invariants_fc")
- # Corresponds to scripting name principal_invariants
- if average_op is not None:
- average_op[0].connect(0, out)
- principal_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- average_op = None
- else:
- principal_op.connect(0, out)
- wf.add_operator(operator=principal_op)
- # Set as future output of the workflow
- if len(to_extract) == 1:
- out = getattr(principal_op.outputs, f"fields_eig_{to_extract[0]+1}")
- else:
- raise NotImplementedError("Cannot combine principal results yet.")
- # We need to define the behavior for storing different results in a DataFrame
-
- # Add a step to compute equivalent if result is equivalent
- elif category == ResultCategory.equivalent:
- equivalent_op = self._model.operator(name="eqv_fc")
- wf.add_operator(operator=equivalent_op)
- # If a strain result, change the location now
- if (
- average_op is not None
- and category == ResultCategory.equivalent
- and base_name[0] == "E"
- ):
- equivalent_op.connect(0, out)
- average_op[0].connect(0, equivalent_op)
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = average_op[1].outputs.fields_container
- elif average_op is not None:
- average_op[0].connect(0, out)
- equivalent_op.connect(0, average_op[1])
- wf.add_operators(list(average_op))
- # Set as future output of the workflow
- out = equivalent_op.outputs.fields_container
- else:
- equivalent_op.connect(0, out)
- out = equivalent_op.outputs.fields_container
- average_op = None
- base_name += "_VM"
-
- if average_op is not None:
- average_op[0].connect(0, out)
- wf.add_operators(list(average_op))
- out = average_op[1].outputs.fields_container
-
- # Add an optional component selection step if result is vector, matrix, or principal
- if (
- category
- in [
- ResultCategory.vector,
- ResultCategory.matrix,
- ]
- ) and (to_extract is not None):
- # Instantiate a component selector operator
- extract_op = self._model.operator(name="component_selector_fc")
- # Feed it the current workflow output
- extract_op.connect(0, out)
- # Feed it the requested components
- extract_op.connect(1, to_extract)
- wf.add_operator(operator=extract_op)
- # Set as future output of the workflow
- out = extract_op.outputs.fields_container
- if len(to_extract) == 1:
- base_name += f"_{comp[0]}"
- comp = None
-
- # Add an optional norm operation if requested
- if norm:
- wf, out, comp, base_name = self._append_norm(wf, out, base_name)
-
- # Set the workflow output
- wf.set_output_name("out", out)
- wf.progress_bar = False
-
- return wf, comp, base_name
+ output_wf.progress_bar = False
+
+ return (
+ output_wf,
+ result_workflows.components,
+ result_workflows.base_name,
+ )
def _get_result(
self,
@@ -184,6 +114,7 @@ def _get_result(
named_selections: Union[List[str], str, None] = None,
external_layer: Union[bool, List[int]] = False,
skin: Union[bool, List[int]] = False,
+ averaging_config: AveragingConfig = AveragingConfig(),
) -> DataFrame:
"""Extract results from the simulation.
@@ -245,6 +176,10 @@ def _get_result(
is computed over list of elements (not supported for cyclic symmetry). Getting the
skin on more than one result (several time freq sets, split data...) is only
supported starting with Ansys 2023R2.
+ averaging_config:
+ By default, averaging happens across all bodies. The averaging config
+ can specify that averaging happens per body instead, and defines the
+ properties used to identify a body.
Returns
-------
@@ -265,7 +200,7 @@ def _get_result(
"and load_steps are mutually exclusive."
)
- selection = self._build_selection(
+ selection, rescoping = self._build_selection(
base_name=base_name,
category=category,
selection=selection,
@@ -288,10 +223,12 @@ def _get_result(
components=components,
norm=norm,
selection=selection,
+ averaging_config=averaging_config,
+ rescoping=rescoping,
)
# Evaluate the workflow
- fc = wf.get_output("out", dpf.types.fields_container)
+ fc = wf.get_output(_WfNames.output_data, dpf.types.fields_container)
disp_wf = self._generate_disp_workflow(fc, selection)
@@ -302,7 +239,7 @@ def _get_result(
_WfNames.mesh, dpf.types.meshed_region
)
- _, _, columns = self._create_components(base_name, category, components)
+ _, _, columns = _create_components(base_name, category, components)
return self._create_dataframe(
fc, location, columns, comp, base_name, disp_wf=disp_wf, submesh=submesh
)
diff --git a/tests/conftest.py b/tests/conftest.py
index 0ee486390..199da1aba 100755
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -3,10 +3,13 @@
Launch or connect to a persistent local DPF service to be shared in
pytest as a session fixture
"""
+import dataclasses
import os
+import pathlib
import re
from ansys.dpf.core.check_version import get_server_version, meets_version
+from ansys.dpf.core.examples.downloads import _download_file
import matplotlib as mpl
import pytest
import pyvista as pv
@@ -44,6 +47,12 @@ def get_lighting():
running_docker = os.environ.get("DPF_DOCKER", False)
+def save_screenshot(dataframe, suffix=""):
+ """Save a screenshot of a dataframe plot, with the current test name."""
+ test_path = pathlib.Path(os.environ.get("PYTEST_CURRENT_TEST"))
+ dataframe.plot(screenshot=f"{'_'.join(test_path.name.split('::'))}_{suffix}.jpeg")
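# Note (sketch): pytest populates PYTEST_CURRENT_TEST with a value like
# "tests/test_simulation.py::test_foo (call)", so save_screenshot(df, suffix="nodal")
# would write roughly "test_simulation.py_test_foo (call)_nodal.jpeg" into the
# current working directory, following the name derivation above.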
+
+
def resolve_test_file(basename, additional_path=""):
"""Resolves a test file's full path based on the base name and the
environment.
@@ -141,6 +150,61 @@ def plate_msup():
return examples.msup_transient
+@pytest.fixture()
+def average_per_body_two_cubes():
+ return _download_file(
+ "result_files/average_per_body/two_cubes", "file.rst", True, None, False
+ )
+
+
+@pytest.fixture()
+def average_per_body_complex_multi_body():
+ return _download_file(
+ "result_files/average_per_body/complex_multi_body",
+ "file.rst",
+ True,
+ None,
+ False,
+ )
+
+
+@dataclasses.dataclass
+class ReferenceCsvFiles:
+ # reference result with all bodies combined
+ # The node ids of nodes at body interfaces are duplicated
+ combined: pathlib.Path
+ # reference result per body (node ids are unique)
+ per_id: dict[str, pathlib.Path]
+
+
+def get_per_body_ref_files(
+ root_path: str, n_bodies: int
+) -> dict[str, ReferenceCsvFiles]:
+ ref_files = {}
+ for result in ["stress", "elastic_strain"]:
+ per_mat_id_dict = {}
+ for mat in range(1, n_bodies + 1):
+ per_mat_id_dict[str(mat)] = _download_file(
+ root_path, f"{result}_mat_{mat}.txt", True, None, False
+ )
+ combined = _download_file(
+ root_path, f"{result}_combined.txt", True, None, False
+ )
+ ref_files[result] = ReferenceCsvFiles(combined=combined, per_id=per_mat_id_dict)
+
+ return ref_files
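# Sketch of what the fixtures below hand to a test (paths are wherever
# _download_file caches the files locally):
#     ref = get_per_body_ref_files("result_files/average_per_body/two_cubes", 2)
#     stress_refs = ref["stress"]       # a ReferenceCsvFiles instance
#     stress_refs.combined              # combined csv, interface nodes duplicated
#     stress_refs.per_id["1"]           # per-body csv for material id 1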
+
+
+@pytest.fixture()
+def average_per_body_complex_multi_body_ref():
+ return get_per_body_ref_files("result_files/average_per_body/complex_multi_body", 7)
+
+
+@pytest.fixture()
+def average_per_body_two_cubes_ref():
+ return get_per_body_ref_files("result_files/average_per_body/two_cubes", 2)
+
+
@pytest.fixture()
def rth_transient():
"""Resolve the path of the "rth/rth_transient.rth" result file."""
@@ -204,6 +268,25 @@ def grpc_server():
server.shutdown()
+@pytest.fixture(scope="session", autouse=True)
+def license_context():
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_6_2:
+ with core.LicenseContextManager(
+ increment_name="preppost", license_timeout_in_seconds=1.0
+ ):
+ yield
+ else:
+ yield
+
+
+SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_1 = meets_version(
+ get_server_version(core._global_server()), "9.1"
+)
+
+SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0 = meets_version(
+ get_server_version(core._global_server()), "9.0"
+)
+
SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0 = meets_version(
get_server_version(core._global_server()), "8.0"
)
diff --git a/tests/test_dataframe.py b/tests/test_dataframe.py
index 81793c94f..f831b0510 100644
--- a/tests/test_dataframe.py
+++ b/tests/test_dataframe.py
@@ -131,8 +131,8 @@ def test_dataframe_select_cells():
reason="Fluid capabilities added with ansys-dpf-server 2024.1.pre0.",
)
def test_dataframe_select_with_labels():
- fluid_files = examples.download_cfx_mixing_elbow()
- simulation = post.FluidSimulation(cas=fluid_files["cas"], dat=fluid_files["dat"])
+ fluid_file = examples.download_cfx_mixing_elbow()
+ simulation = post.FluidSimulation(fluid_file)
df = simulation.enthalpy()
df2 = df.select(node_ids=[1])
ref = """
diff --git a/tests/test_dpfresultwithkeywords.py b/tests/test_dpfresultwithkeywords.py
index 1f32e3cec..ea11ce2fd 100755
--- a/tests/test_dpfresultwithkeywords.py
+++ b/tests/test_dpfresultwithkeywords.py
@@ -7,6 +7,7 @@
from ansys import dpf
from ansys.dpf import post
+from conftest import SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0
def test_displacement_with_scoping_verbose_api(allkindofcomplexity):
@@ -261,7 +262,10 @@ def test_groupingelshape_nodallocation_verbose_api(allkindofcomplexity):
assert disp.result_fields_container.get_label_space(3) == {"elshape": 3, "time": 1}
assert len(disp.get_data_at_field(0)) == 14826
assert len(disp.get_data_at_field(1)) == 1486
- assert len(disp.get_data_at_field(2)) == 19
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ assert len(disp.get_data_at_field(2)) == 21
+ else:
+ assert len(disp.get_data_at_field(2)) == 19
assert len(disp.get_data_at_field(3)) == 4
assert np.isclose(disp.get_data_at_field(2)[0][0], 5.523488975819807e-20)
assert disp[0].location == locations.nodal
@@ -275,7 +279,10 @@ def test_groupingelshape_nodallocation(allkindofcomplexity):
assert disp.result_fields_container.get_label_space(3) == {"elshape": 3, "time": 1}
assert len(disp.get_data_at_field(0)) == 14826
assert len(disp.get_data_at_field(1)) == 1486
- assert len(disp.get_data_at_field(2)) == 19
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ assert len(disp.get_data_at_field(2)) == 21
+ else:
+ assert len(disp.get_data_at_field(2)) == 19
assert len(disp.get_data_at_field(3)) == 4
assert np.isclose(disp.get_data_at_field(2)[0][0], 5.523488975819807e-20)
assert disp[0].location == locations.nodal
@@ -367,7 +374,10 @@ def test_groupingelshape_elemlocation(allkindofcomplexity):
def test_groupingmat_nodallocation_verbose_api(allkindofcomplexity):
result = post.load_solution(allkindofcomplexity)
disp = result.misc.nodal_displacement(grouping=post.grouping.by_material)
- assert disp.num_fields == 11
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ assert disp.num_fields == 13
+ else:
+ assert disp.num_fields == 11
assert len(disp[0]) == 6288
assert len(disp[2]) == 744
assert np.isclose(disp.get_data_at_field(2)[0][2], -6.649053654123576e-07)
@@ -381,7 +391,10 @@ def test_groupingmat_nodallocation(allkindofcomplexity):
result = post.load_solution(allkindofcomplexity)
d = result.displacement(grouping=post.grouping.by_material)
disp = d.vector
- assert disp.num_fields == 11
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ assert disp.num_fields == 13
+ else:
+ assert disp.num_fields == 11
assert len(disp[0]) == 6288
assert len(disp[2]) == 744
assert np.isclose(disp.get_data_at_field(2)[0][2], -6.649053654123576e-07)
diff --git a/tests/test_selection.py b/tests/test_selection.py
index a9eaa1c62..7360504d5 100644
--- a/tests/test_selection.py
+++ b/tests/test_selection.py
@@ -6,7 +6,10 @@
from ansys.dpf import post
from ansys.dpf.post import examples
from ansys.dpf.post.selection import SpatialSelection
-from conftest import SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0
+from conftest import (
+ SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0,
+ SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_1,
+)
def test_spatial_selection_select_nodes(allkindofcomplexity):
@@ -99,7 +102,11 @@ def test_spatial_selection_select_faces_of_elements(self, fluent_simulation):
)
scoping = selection._evaluate_on(fluent_simulation)
assert scoping.location == post.locations.faces
- assert np.allclose(scoping.ids, [11479, 11500, -1, 11502, 11503])
+ if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_1:
+ list_ref = [11479, 11500, -1, 11502, 11503]
+ else:
+ list_ref = [12481, 12502, 39941, 43681, 12504, 12505]
+ assert np.allclose(scoping.ids, list_ref)
#
diff --git a/tests/test_simulation.py b/tests/test_simulation.py
index 701f62662..ffa920315 100644
--- a/tests/test_simulation.py
+++ b/tests/test_simulation.py
@@ -1,21 +1,448 @@
+import csv
+import dataclasses
import os.path
+import pathlib
+from typing import Optional, Union
import ansys.dpf.core as dpf
+from ansys.dpf.core import (
+ Field,
+ FieldsContainer,
+ MeshedRegion,
+ Scoping,
+ element_types,
+ natures,
+ operators,
+)
+from ansys.dpf.gate.common import locations
import numpy as np
import pytest
from pytest import fixture
from ansys.dpf import post
+from ansys.dpf.post import StaticMechanicalSimulation
from ansys.dpf.post.common import AvailableSimulationTypes, elemental_properties
from ansys.dpf.post.index import ref_labels
from ansys.dpf.post.meshes import Meshes
+from ansys.dpf.post.result_workflows._component_helper import ResultCategory
+from ansys.dpf.post.result_workflows._utils import (
+ AveragingConfig,
+ _CreateOperatorCallable,
+)
+from ansys.dpf.post.selection import _WfNames
+from ansys.dpf.post.simulation import MechanicalSimulation, Simulation
from conftest import (
SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0,
SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_6_2,
SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1,
+ SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0,
+ SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0,
+ SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_1,
+ ReferenceCsvFiles,
)
+def is_principal(mode: Optional[str]) -> bool:
+ return mode == "principal"
+
+
+def is_equivalent(mode: Optional[str]) -> bool:
+ return mode == "equivalent"
+
+
+def mode_suffix(mode: Optional[str]) -> str:
+ if mode == "equivalent":
+ return "_eqv_von_mises"
+ elif mode == "principal":
+ return "_principal"
+ return ""
+
+
+def get_expected_elemental_average_skin_value(
+ element_id: int,
+ solid_mesh: MeshedRegion,
+ skin_mesh: MeshedRegion,
+ elemental_nodal_solid_data_field: Field,
+ is_principal_strain_result: bool,
+) -> dict[int, float]:
+ """
+ Get the average skin value of all the skin elements that belong to the solid
+ element with the element_id.
+ Returns a dictionary with the expected average skin values indexed
+ by the skin element id.
+ """
+
+ elemental_nodal_solid_data = elemental_nodal_solid_data_field.get_entity_data_by_id(
+ element_id
+ )
+
+ # Find all nodes connected to this solid element
+ connected_node_indices = (
+ solid_mesh.elements.connectivities_field.get_entity_data_by_id(element_id)
+ )
+ connected_node_ids = solid_mesh.nodes.scoping.ids[connected_node_indices]
+
+ # Find the skin elements attached to any node of the solid element.
+ # Note: this can include skin elements that do not belong to this
+ # particular solid element (not all of their nodes are part of it).
+ skin_element_ids = set()
+ for connected_node_id in connected_node_ids:
+ skin_element_index = (
+ skin_mesh.nodes.nodal_connectivity_field.get_entity_data_by_id(
+ connected_node_id
+ )
+ )
+ skin_element_ids.update(skin_mesh.elements.scoping.ids[skin_element_index])
+
+ expected_average_skin_values = {}
+ for skin_element_id in skin_element_ids:
+ # Go through all the skin element candidates and check if all their nodes are
+ # part of the solid element.
+ skin_node_indices = (
+ skin_mesh.elements.connectivities_field.get_entity_data_by_id(
+ skin_element_id
+ )
+ )
+ node_ids = skin_mesh.nodes.scoping.ids[skin_node_indices]
+
+ node_missing = False
+ for node_id in node_ids:
+ if node_id not in connected_node_ids:
+ node_missing = True
+ break
+
+ if node_missing:
+ # If a node is missing, the skin element does not belong to the solid element
+ continue
+
+ # Get the elementary indices of the connected nodes in the solid
+ # ElementalNodal data
+ indices_to_average = []
+ for idx, node_id in enumerate(connected_node_ids):
+ if node_id in node_ids:
+ indices_to_average.append(idx)
+
+ # Skip indices that are out of bounds (these are the mid-side nodes)
+ indices_to_average = [
+ idx for idx in indices_to_average if idx < len(elemental_nodal_solid_data)
+ ]
+ if elemental_nodal_solid_data_field.component_count > 1:
+ data_to_average = elemental_nodal_solid_data[indices_to_average, :]
+ else:
+ data_to_average = elemental_nodal_solid_data[indices_to_average]
+
+ average = np.mean(data_to_average, axis=0)
+ if is_principal_strain_result:
+ # Workaround: the principal operator divides
+ # off-diagonal components by 2 if the input field has
+ # an integer "strain" property set. Since int
+ # field properties are not exposed in Python, the division is done
+ # here before the data is passed to the principal operator.
+ average[3:7] = average[3:7] / 2
+
+ expected_average_skin_values[skin_element_id] = average
+ return expected_average_skin_values
+
+
+def get_expected_skin_results(
+ create_operator_callable: _CreateOperatorCallable,
+ element_ids: list[int],
+ elemental_nodal_fc: FieldsContainer,
+ meshed_region: MeshedRegion,
+ mode: Optional[str],
+ result_name: str,
+):
+ expected_skin_values = {}
+ for element_id in element_ids:
+ expected_skin_values_per_element = get_expected_elemental_average_skin_value(
+ element_id=element_id,
+ solid_mesh=elemental_nodal_fc[0].meshed_region,
+ skin_mesh=meshed_region,
+ elemental_nodal_solid_data_field=elemental_nodal_fc[0],
+ is_principal_strain_result=is_principal(mode)
+ and result_name == "elastic_strain",
+ )
+
+ if is_principal(mode) or (
+ is_equivalent(mode) and result_name != "elastic_strain"
+ ):
+ # We need to put the expected skin values in a Field, to compute
+ # the equivalent or principal values with a dpf operator.
+ # For the elastic strain result, the invariants are computed before the
+ # averaging
+ field = Field(nature=natures.symmatrix)
+ for (
+ skin_element_id,
+ expected_skin_value,
+ ) in expected_skin_values_per_element.items():
+ field.append(list(expected_skin_value), skin_element_id)
+ if is_principal(mode):
+ invariant_op = operators.invariant.principal_invariants()
+ invariant_op.inputs.field(field)
+ field_out = invariant_op.outputs.field_eig_1()
+ else:
+ invariant_op = create_operator_callable(name="eqv")
+ invariant_op.inputs.field(field)
+ field_out = invariant_op.outputs.field()
+
+ for skin_element_id in expected_skin_values_per_element:
+ expected_skin_values_per_element[
+ skin_element_id
+ ] = field_out.get_entity_data_by_id(skin_element_id)
+ expected_skin_values.update(expected_skin_values_per_element)
+ return expected_skin_values
+
+
+def get_and_check_elemental_skin_results(
+ static_simulation: StaticMechanicalSimulation,
+ fc_elemental_nodal: FieldsContainer,
+ result_name: str,
+ mode: str,
+ element_ids: list[int],
+ skin: Union[list[int], bool],
+ expand_cyclic: bool,
+):
+ """
+ Get the elemental skin results and check if they match the
+ expected average skin values.
+ """
+
+ # Not all the simulation types have the expand_cyclic option
+ kwargs = {}
+ if expand_cyclic:
+ kwargs["expand_cyclic"] = True
+ result_skin_scoped_elemental = getattr(
+ static_simulation, f"{result_name}{mode_suffix(mode)}_elemental"
+ )(set_ids=[1], skin=skin, **kwargs)
+
+ if is_equivalent(mode) and result_name == "elastic_strain":
+ # For the elastic strain result, the equivalent strains are computed
+ # before the averaging
+ invariant_op = static_simulation._model.operator(name="eqv_fc")
+ invariant_op.inputs.fields_container(fc_elemental_nodal)
+ fc_elemental_nodal = invariant_op.outputs.fields_container()
+
+ expected_skin_values = get_expected_skin_results(
+ create_operator_callable=static_simulation._model.operator,
+ element_ids=element_ids,
+ elemental_nodal_fc=fc_elemental_nodal,
+ meshed_region=result_skin_scoped_elemental._fc[0].meshed_region,
+ mode=mode,
+ result_name=result_name,
+ )
+
+ for skin_element_id, expected_skin_value in expected_skin_values.items():
+ actual_skin_value = result_skin_scoped_elemental._fc[0].get_entity_data_by_id(
+ skin_element_id
+ )
+ assert np.allclose(actual_skin_value, expected_skin_value)
+
+
+operator_map = {"stress": "S", "elastic_strain": "EPEL", "displacement": "U"}
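# Note: these are the names of the underlying DPF result operators ("S" for
# stress, "EPEL" for elastic strain, "U" for displacement), used below to fetch
# raw ElementalNodal data without going through the PyDPF-Post averaging.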
+
+
+def get_elemental_nodal_results(
+ simulation: Simulation,
+ result_name: str,
+ scoping: Scoping,
+ mode: str,
+ expand_cyclic: bool,
+):
+ time_id = 1
+ if expand_cyclic:
+ result_result_scoped_elemental = getattr(
+ simulation, f"{result_name}{mode_suffix(mode)}"
+ )(set_ids=[time_id], expand_cyclic=True)
+
+ return result_result_scoped_elemental._fc
+
+ else:
+ elemental_nodal_result_op = simulation._model.operator(
+ name=operator_map[result_name]
+ )
+ if scoping is not None:
+ elemental_nodal_result_op.inputs.mesh_scoping(scoping)
+ elemental_nodal_result_op.inputs.time_scoping([time_id])
+ elemental_nodal_result_op.inputs.requested_location("ElementalNodal")
+
+ return elemental_nodal_result_op.outputs.fields_container()
+
+
+def get_expected_nodal_averaged_skin_results(
+ skin_mesh: MeshedRegion,
+ solid_elemental_nodal_results: Field,
+ is_principal_strain_result: bool,
+):
+ nodal_averaged_skin_values = Field(
+ nature=solid_elemental_nodal_results.field_definition.dimensionality.nature,
+ location=locations.nodal,
+ )
+ solid_mesh = solid_elemental_nodal_results.meshed_region
+
+ all_midside_node_ids = get_all_midside_node_ids(
+ solid_elemental_nodal_results.meshed_region
+ )
+ for skin_node_id in skin_mesh.nodes.scoping.ids:
+ if skin_node_id in all_midside_node_ids:
+ # Skip mid-side nodes; no results are
+ # extracted for mid-side nodes
+ continue
+ solid_elements_indices = (
+ solid_mesh.nodes.nodal_connectivity_field.get_entity_data_by_id(
+ skin_node_id
+ )
+ )
+ solid_elements_ids = solid_mesh.elements.scoping.ids[solid_elements_indices]
+
+ solid_elemental_nodal_value = {}
+
+ # Get the elemental nodal value for each adjacent solid element;
+ # these values are averaged later to obtain the nodal value
+ for solid_element_id in solid_elements_ids:
+ if solid_element_id not in solid_elemental_nodal_results.scoping.ids:
+ # Solid element is connected to the node but does not have
+ # a value because it was not selected with the scoping
+ continue
+ solid_element_data = solid_elemental_nodal_results.get_entity_data_by_id(
+ solid_element_id
+ )
+ connected_node_indices = (
+ solid_mesh.elements.connectivities_field.get_entity_data_by_id(
+ solid_element_id
+ )
+ )
+ connected_node_ids = solid_mesh.nodes.scoping.ids[connected_node_indices]
+ node_index_in_elemental_data = np.where(connected_node_ids == skin_node_id)[
+ 0
+ ][0]
+ solid_elemental_nodal_value[solid_element_id] = solid_element_data[
+ node_index_in_elemental_data
+ ]
+
+ # Average over the adjacent skin elements
+ values_to_average = np.empty(
+ shape=(0, solid_elemental_nodal_results.component_count)
+ )
+ skin_element_indices = (
+ skin_mesh.nodes.nodal_connectivity_field.get_entity_data_by_id(skin_node_id)
+ )
+ skin_element_ids = skin_mesh.elements.scoping.ids[skin_element_indices]
+ for skin_element_id in skin_element_ids:
+ matching_solid_element_id = None
+ connected_skin_node_indices = (
+ skin_mesh.elements.connectivities_field.get_entity_data_by_id(
+ skin_element_id
+ )
+ )
+ connected_skin_node_ids = skin_mesh.nodes.scoping.ids[
+ connected_skin_node_indices
+ ]
+
+ for solid_element_id in solid_elemental_nodal_value.keys():
+ connected_solid_node_indices = (
+ solid_mesh.elements.connectivities_field.get_entity_data_by_id(
+ solid_element_id
+ )
+ )
+ connected_solid_node_ids = solid_mesh.nodes.scoping.ids[
+ connected_solid_node_indices
+ ]
+
+ if set(connected_skin_node_ids).issubset(connected_solid_node_ids):
+ matching_solid_element_id = solid_element_id
+ break
+
+ if matching_solid_element_id is None:
+ raise RuntimeError(
+ f"No matching solid element found for skin element {skin_element_id}"
+ )
+ skin_nodal_value = solid_elemental_nodal_value[matching_solid_element_id]
+ values_to_average = np.vstack((values_to_average, skin_nodal_value))
+ skin_values = np.mean(values_to_average, axis=0)
+ if is_principal_strain_result:
+ # Workaround: the principal operator divides
+ # off-diagonal components by 2 if the input field has
+ # an integer "strain" property set. Since int
+ # field properties are not exposed in Python, the division is done
+ # here before the data is passed to the principal operator.
+ skin_values[3:7] = skin_values[3:7] / 2
+
+ nodal_averaged_skin_values.append(list(skin_values), skin_node_id)
+ return nodal_averaged_skin_values
+
+
+def get_all_midside_node_ids(mesh: MeshedRegion):
+ all_midside_nodes = set()
+ for element in mesh.elements:
+ element_descriptor = element_types.descriptor(element.type)
+
+ all_node_ids = element.node_ids
+ for idx, node_id in enumerate(all_node_ids):
+ if idx >= element_descriptor.n_corner_nodes:
+ all_midside_nodes.add(node_id)
+
+ return all_midside_nodes
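# Note: for a quadratic hexahedron (element_types.Hex20), for example, the
# descriptor reports 8 corner nodes, so the remaining 12 entries of
# element.node_ids (indices 8 to 19) are collected as mid-side nodes.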
+
+
+def get_expected_nodal_skin_results(
+ simulation: MechanicalSimulation,
+ result_name: str,
+ mode: Optional[str],
+ skin_mesh: MeshedRegion,
+ expand_cyclic: bool,
+ elemental_nodal_results: Optional[FieldsContainer] = None,
+):
+ # We have two options to get nodal data:
+ # 1) Request nodal location directly from the operator.
+ # This way we get the nodal data of the full mesh scoped to
+ # the elements in the element scope.
+ # 2) Get the elemental nodal data and then
+ # average it to nodal. We get different results at the boundaries
+ # of the element scope compared to 1). This is because the averaging cannot take into
+ # account the elemental nodal data outside of the element scope. Therefore, the
+ # averaged node data at the boundaries is different.
+ # Currently, the skin workflow requests elemental nodal data and then averages it to nodal,
+ # which corresponds to the case 2 above.
+
+ # If we don't get elemental_nodal_results, the result does not support
+ # elemental nodal evaluation. Currently this is only the case for displacement.
+ # In this case we just get the nodal results directly (case 1 above)
+ time_id = 1
+ if elemental_nodal_results is None:
+ assert result_name == "displacement"
+ kwargs = {}
+ if expand_cyclic:
+ kwargs["expand_cyclic"] = True
+ nodal_field = simulation.displacement(set_ids=[time_id], **kwargs)._fc[0]
+ else:
+ if is_equivalent(mode) and result_name == "elastic_strain":
+ # For elastic strain results, the computation of the equivalent
+ # value happens before the averaging.
+ invariant_op = simulation._model.operator(name="eqv_fc")
+ invariant_op.inputs.fields_container(elemental_nodal_results)
+ fields_container = invariant_op.outputs.fields_container()
+ else:
+ fields_container = elemental_nodal_results
+
+ nodal_field = get_expected_nodal_averaged_skin_results(
+ skin_mesh=skin_mesh,
+ solid_elemental_nodal_results=fields_container[0],
+ is_principal_strain_result=is_principal(mode)
+ and result_name == "elastic_strain",
+ )
+
+ if is_principal(mode):
+ invariant_op = simulation._model.operator(name="invariants")
+ invariant_op.inputs.field(nodal_field)
+ nodal_field = invariant_op.outputs.field_eig_1()
+
+ if is_equivalent(mode) and result_name != "elastic_strain":
+ invariant_op = simulation._model.operator(name="eqv")
+ invariant_op.inputs.field(nodal_field)
+ nodal_field = invariant_op.outputs.field()
+ return nodal_field
+
+
@fixture
def static_simulation(static_rst):
return post.load_simulation(
@@ -24,6 +451,35 @@ def static_simulation(static_rst):
)
+@fixture
+def transient_simulation(plate_msup):
+ return post.load_simulation(
+ data_sources=plate_msup,
+ simulation_type=AvailableSimulationTypes.transient_mechanical,
+ )
+
+
+@fixture
+def modal_simulation(modalallkindofcomplexity):
+ return post.load_simulation(
+ data_sources=modalallkindofcomplexity,
+ simulation_type=AvailableSimulationTypes.modal_mechanical,
+ )
+
+
+@fixture
+def harmonic_simulation(complex_model):
+ return post.load_simulation(
+ data_sources=complex_model,
+ simulation_type=AvailableSimulationTypes.harmonic_mechanical,
+ )
+
+
+@fixture
+def cyclic_static_simulation(simple_cyclic):
+ return post.StaticMechanicalSimulation(simple_cyclic)
+
+
def test_simulation_init(static_rst):
simulation = post.StaticMechanicalSimulation(static_rst)
assert simulation is not None
@@ -112,7 +568,10 @@ def test_simulation_split_mesh_by_properties(allkindofcomplexity):
]
)
assert isinstance(meshes, Meshes)
- assert len(meshes) == 16
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ assert len(meshes) == 18
+ else:
+ assert len(meshes) == 16
meshes = simulation.split_mesh_by_properties(
properties={
elemental_properties.material: 1,
@@ -194,14 +653,6 @@ def test_raise_mutually_exclusive(self, static_simulation):
with pytest.raises(ValueError, match="exclusive"):
_ = static_simulation.displacement(load_steps=[1], set_ids=[1])
- def test_raise_node_ids_elemental(self, static_simulation):
- with pytest.raises(
- ValueError, match="Argument 'node_ids' can only be used if 'location'"
- ):
- _ = static_simulation.stress(
- node_ids=[42], location=post.locations.elemental
- )
-
def test_displacement(self, static_simulation):
displacement_x = static_simulation.displacement(
components=["X"], node_ids=[42, 43, 44]
@@ -684,14 +1135,177 @@ def test_skin_layer6(self, static_simulation: post.StaticMechanicalSimulation):
assert len(result.index.mesh_index) == 18
-class TestTransientMechanicalSimulation:
- @fixture
- def transient_simulation(self, plate_msup):
- return post.load_simulation(
- data_sources=plate_msup,
- simulation_type=AvailableSimulationTypes.transient_mechanical,
- )
+# List of element configurations for each simulation type
+element_configurations = {
+ "static_simulation": {
+ 1: [1],
+ 2: [1, 2],
+ 3: [1, 2, 3],
+ 4: [1, 2, 3, 4],
+ 5: [1, 2, 3, 4, 5],
+ 6: [1, 2, 3, 4, 5, 6, 7, 8],
+ },
+ "transient_simulation": {
+ 1: [1],
+ 2: [1, 2],
+ 3: [1, 2, 3],
+ 4: [1, 2, 3, 4],
+ 5: [1, 2, 3, 4, 5],
+ 6: [1, 2, 3, 4, 5, 6, 7, 8],
+ },
+ "modal_simulation": {1: [1], 2: [1, 2], 3: [1, 2, 3], 4: [1, 2, 3, 4, 5, 6, 7, 8]},
+ "harmonic_simulation": {1: [1], 2: [1, 2], 3: [1, 2, 3], 4: list(range(1, 100))},
+ "cyclic_static_simulation": {
+ # Empty dict because element selection is
+ # not supported for cyclic simulations
+ },
+}
+
+# Build the list of all element configuration ids defined above, plus True for full-skin extraction
+all_configuration_ids = [True] + list(
+ set().union(
+ *[
+ element_configurations.keys()
+ for element_configurations in element_configurations.values()
+ ]
+ )
+)
+
+
+@pytest.mark.parametrize("skin", all_configuration_ids)
+@pytest.mark.parametrize("result_name", ["stress", "elastic_strain", "displacement"])
+@pytest.mark.parametrize("mode", [None, "principal", "equivalent"])
+@pytest.mark.parametrize(
+ "simulation_str",
+ [
+ "static_simulation",
+ "transient_simulation",
+ "modal_simulation",
+ "harmonic_simulation",
+ # Just some very basic tests for the cyclic simulation
+ "cyclic_static_simulation",
+ ],
+)
+def test_skin_extraction(skin, result_name, mode, simulation_str, request):
+ if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_1:
+ return
+
+ time_id = 1
+
+ simulation = request.getfixturevalue(simulation_str)
+
+ supports_elemental = True
+ is_cyclic_simulation = simulation_str == "cyclic_static_simulation"
+
+ if is_cyclic_simulation:
+ if result_name == "elastic_strain":
+ # cyclic simulation does not have elastic strain results
+ return
+ if is_equivalent(mode) or is_principal(mode):
+ # Test for equivalent and principal modes not implemented
+ return
+
+ if skin is not True:
+ skin = element_configurations[simulation_str].get(skin)
+ if skin is None:
+ # Return if an element configuration does
+ # not exist for the given simulation type
+ return
+
+ if result_name == "displacement":
+ supports_elemental = False
+ if is_principal(mode) or is_equivalent(mode):
+ # Return for unsupported results
+ return
+
+ if isinstance(skin, list):
+ element_ids = skin
+ else:
+ if isinstance(simulation, post.ModalMechanicalSimulation):
+ # The modal result contains different element types. Here
+ # we just extract the solid elements
+ solid_elements_mesh = simulation.split_mesh_by_properties(
+ {elemental_properties.element_type: element_types.Hex20.value}
+ )
+ if isinstance(solid_elements_mesh, Meshes):
+ element_ids = solid_elements_mesh[0].element_ids
+ else:
+ element_ids = solid_elements_mesh.element_ids
+ skin = element_ids
+ else:
+ element_ids = simulation.mesh.element_ids
+
+ scoping = None
+ if isinstance(skin, list):
+ scoping = Scoping(ids=element_ids, location="elemental")
+
+ fc_elemental_nodal = None
+ if supports_elemental:
+ fc_elemental_nodal = get_elemental_nodal_results(
+ simulation=simulation,
+ result_name=result_name,
+ scoping=scoping,
+ expand_cyclic=is_cyclic_simulation,
+ mode=mode,
+ )
+
+ get_and_check_elemental_skin_results(
+ static_simulation=simulation,
+ fc_elemental_nodal=fc_elemental_nodal,
+ result_name=result_name,
+ mode=mode,
+ element_ids=element_ids,
+ skin=skin,
+ expand_cyclic=is_cyclic_simulation,
+ )
+
+ # Not all the simulation types have the expand_cyclic argument
+ kwargs = {}
+ if is_cyclic_simulation:
+ kwargs["expand_cyclic"] = True
+
+ # For displacements the nodal result
+ # is just called displacement without
+ # the "nodal" suffix
+ nodal_suffix = "_nodal"
+ if result_name == "displacement":
+ nodal_suffix = ""
+
+ result_skin_scoped_nodal = getattr(
+ simulation, f"{result_name}{mode_suffix(mode)}{nodal_suffix}"
+ )(set_ids=[time_id], skin=skin, **kwargs)
+ nodal_skin_field = result_skin_scoped_nodal._fc[0]
+
+ expected_nodal_values_field = get_expected_nodal_skin_results(
+ simulation=simulation,
+ result_name=result_name,
+ mode=mode,
+ skin_mesh=nodal_skin_field.meshed_region,
+ elemental_nodal_results=fc_elemental_nodal,
+ expand_cyclic=is_cyclic_simulation,
+ )
+
+ for node_id in expected_nodal_values_field.scoping.ids:
+ if result_name == "displacement":
+ if node_id not in nodal_skin_field.scoping.ids:
+ # Displacement results are also returned for internal
+ # nodes; skip those here.
+ continue
+ assert np.allclose(
+ expected_nodal_values_field.get_entity_data_by_id(node_id),
+ nodal_skin_field.get_entity_data_by_id(node_id),
+ ), str(node_id)
+
+ # result_skin_scoped_elemental_nodal = getattr(
+ # static_simulation, f"{result_name}{mode_suffix(mode)}"
+ # )(all_sets=True, skin=element_ids)
+
+ # Todo: Elemental nodal does not work
+ # Returns just the element nodal data of the solid
+ # result_skin_scoped_elemental_nodal
+
+class TestTransientMechanicalSimulation:
def test_times_argument(self, transient_simulation, static_simulation):
with pytest.raises(
ValueError, match="Could not find time=0.0 in the simulation."
@@ -1370,13 +1984,6 @@ def test_skin_layer6(
class TestModalMechanicalSimulation:
- @fixture
- def modal_simulation(self, modalallkindofcomplexity):
- return post.load_simulation(
- data_sources=modalallkindofcomplexity,
- simulation_type=AvailableSimulationTypes.modal_mechanical,
- )
-
@fixture
def frame_modal_simulation(self, modalframe):
return post.load_simulation(
@@ -2014,7 +2621,22 @@ def test_stress_skin(self, frame_modal_simulation: post.ModalMechanicalSimulatio
set_ids=[1], skin=list(range(1, 100))
)
assert len(result.columns.set_ids) == 1
- if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert len(result.index.mesh_index) == 132
+ assert np.allclose(
+ result.max(axis="element_ids").array,
+ [
+ [
+ 88.09000492095947,
+ 426.211181640625,
+ 747.8219401041666,
+ 30.50066868464152,
+ 412.8089192708333,
+ 109.25983428955078,
+ ]
+ ],
+ )
+ elif SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 36
assert np.allclose(
result.max(axis="element_ids").array,
@@ -2089,7 +2711,9 @@ def test_strain_skin(self, frame_modal_simulation: post.ModalMechanicalSimulatio
result = frame_modal_simulation.stress_principal_elemental(
set_ids=[1], skin=list(range(1, 100))
)
- if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert len(result.index.mesh_index) == 132
+ elif SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 36
else:
assert len(result.index.mesh_index) == 110
@@ -2135,13 +2759,6 @@ def test_strain_skin3(self, frame_modal_simulation: post.ModalMechanicalSimulati
class TestHarmonicMechanicalSimulation:
- @fixture
- def harmonic_simulation(self, complex_model):
- return post.load_simulation(
- data_sources=complex_model,
- simulation_type=AvailableSimulationTypes.harmonic_mechanical,
- )
-
def test_cyclic(self, simple_cyclic):
simulation = post.HarmonicMechanicalSimulation(simple_cyclic)
result = simulation.displacement(expand_cyclic=False)
@@ -2764,7 +3381,10 @@ def test_stress_skin(self, harmonic_simulation: post.HarmonicMechanicalSimulatio
result = harmonic_simulation.stress_elemental(
set_ids=[1], skin=list(range(1, 100))
)
- if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
+
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert len(result.index.mesh_index) == 360
+ elif SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 122
else:
assert len(result.index.mesh_index) == 192
@@ -2772,7 +3392,10 @@ def test_stress_skin(self, harmonic_simulation: post.HarmonicMechanicalSimulatio
result = harmonic_simulation.stress_eqv_von_mises_nodal(
set_ids=[1], skin=list(range(1, 100))
)
- if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
+
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert len(result.index.mesh_index) == 1080
+ elif SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 520
else:
assert len(result.index.mesh_index) == 530
@@ -2799,7 +3422,10 @@ def test_strain_skin(self, harmonic_simulation: post.HarmonicMechanicalSimulatio
result = harmonic_simulation.stress_principal_elemental(
set_ids=[1], skin=list(range(1, 100))
)
- if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
+
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert len(result.index.mesh_index) == 360
+ elif SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 122
else:
assert len(result.index.mesh_index) == 192
@@ -2807,14 +3433,24 @@ def test_strain_skin(self, harmonic_simulation: post.HarmonicMechanicalSimulatio
result = harmonic_simulation.elastic_strain_eqv_von_mises_nodal(
set_ids=[1], skin=list(range(1, 100))
)
- if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
+
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert len(result.index.mesh_index) == 1080
+ elif SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 520
else:
assert len(result.index.mesh_index) == 530
assert len(result.columns.set_ids) == 1
- assert np.allclose(
- result.select(complex=0).max(axis="node_ids").array, [1.34699501e-06]
- )
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert np.allclose(
+ result.select(complex=0).max(axis="node_ids").array,
+ [1.37163319e-06],
+ )
+ else:
+ assert np.allclose(
+ result.select(complex=0).max(axis="node_ids").array,
+ [1.34699501e-06],
+ )
result = harmonic_simulation.elastic_strain_eqv_von_mises_nodal(
set_ids=[1], skin=True
)
@@ -2826,13 +3462,16 @@ def test_strain_skin(self, harmonic_simulation: post.HarmonicMechanicalSimulatio
result = harmonic_simulation.elastic_strain_principal_nodal(
set_ids=[1], skin=list(range(1, 100))
)
- if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
+ if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_0:
+ assert len(result.index.mesh_index) == 1080
+ elif SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 520
else:
assert len(result.index.mesh_index) == 530
assert len(result.columns.set_ids) == 1
result = harmonic_simulation.elastic_strain_eqv_von_mises_elemental(
- set_ids=[1], skin=True
+ set_ids=[1],
+ skin=True,
)
if SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_1:
assert len(result.index.mesh_index) == 1394
@@ -2847,3 +3486,691 @@ def test_elemental_ns_on_nodal_result(modal_frame):
disp = simulation.displacement(named_selections=["BAR_1"])
assert disp.index[0].name == ref_labels.node_ids
assert len(disp.index[0].values) == 1370
+
+
+@dataclasses.dataclass
+class ReferenceDataItem:
+    # Node ids of all nodes. Note: the list contains duplicates
+    # for nodes at the body interfaces.
+    node_ids: list[int]
+    # Data for each node, in the same order as the node ids.
+    data: list[float]
+
+
+@dataclasses.dataclass
+class ReferenceData:
+    # Data read from the reference CSV files.
+ # Reference data for all bodies combined
+ combined: ReferenceDataItem
+ # Reference data per body
+ per_id: dict[str, ReferenceDataItem]
+
+
+def get_node_and_data_map(
+ mesh: MeshedRegion, csv_file_path: pathlib.Path
+) -> ReferenceDataItem:
+    # Get the data from a single csv file and find the corresponding node in the dpf
+    # mesh (by coordinates). Note: the node ids in the csv appear to match the node
+    # labels in the dpf mesh, so the coordinate search is kept for now, but the node
+    # labels in the csv file could probably be used directly.
+ with open(csv_file_path) as csv_file:
+ reader = csv.reader(csv_file, delimiter="\t")
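+        # Skip the CSV header row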
+ next(reader, None)
+ node_coordinates = mesh.nodes.coordinates_field
+ node_coordinates_csv = []
+ data_rows = []
+        for row in reader:
+ node_coordinates_csv.append(
+ np.array([float(row[1]), float(row[2]), float(row[3])])
+ )
+ data_rows.append(float(row[4]))
+
+ node_coordinates_dpf = node_coordinates.data
+ node_ids = []
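+    # Match each csv coordinate to its dpf node by comparing coordinates
+    # within a relative tolerance.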
+    for csv_coord in node_coordinates_csv:
+ index = np.where(
+ np.isclose(node_coordinates_dpf, csv_coord, rtol=1e-3).all(axis=1)
+ )[0]
+ if index.size > 0:
+ assert index.size == 1
+ node_id = mesh.nodes.scoping.ids[index[0]]
+
+ node_ids.append(node_id)
+ else:
+ raise RuntimeError(
+ f"Node not found in dpf mesh. Node coordinate: {csv_coord}"
+ )
+
+ return ReferenceDataItem(node_ids, data_rows)
+
+
+def get_ref_data_from_csv(
+    mesh: MeshedRegion, reference_csv_files: ReferenceCsvFiles
+):
+    combined_ref_data = get_node_and_data_map(mesh, reference_csv_files.combined)
+    per_id_ref_data = {}
+    for mat_id, csv_file in reference_csv_files.per_id.items():
+ per_id_ref_data[mat_id] = get_node_and_data_map(mesh, csv_file)
+ return ReferenceData(combined_ref_data, per_id_ref_data)
+
+
+def get_bodies_in_scoping(meshed_region: MeshedRegion, scoping: Scoping):
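+    # Return the ids of the materials ("mat" property) of the bodies
+    # touched by the given scoping.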
+ elemental_scoping = scoping
+ if scoping.location == locations.nodal:
+ elemental_scoping = operators.scoping.transpose(
+ mesh_scoping=scoping,
+ meshed_region=meshed_region,
+ inclusive=0,
+ requested_location=locations.elemental,
+ ).eval()
+
+ mat_field = meshed_region.property_field("mat")
+ rescoped_mat_field_op = dpf.operators.scoping.rescope_property_field(
+ fields=mat_field,
+ mesh_scoping=elemental_scoping,
+ )
+
+ rescoped_mat_field = rescoped_mat_field_op.outputs.fields_as_property_field()
+
+ return list(set(rescoped_mat_field.data))
+
+
+def get_ref_result_per_node_and_material(
+ mesh: MeshedRegion, reference_csv_files: ReferenceCsvFiles
+):
+    # Get the reference data from the csv files.
+    # Returns a dictionary with node_id and mat_id as nested keys.
+    # Goes through the nodes and checks which materials contain each node.
+    # The combined data is only used for consistency checks: it is checked
+    # that the combined data contains the values of each material.
+ ref_data = get_ref_data_from_csv(mesh, reference_csv_files)
+
+ node_id_to_row_index_map_combined = {}
+ for idx, node_id in enumerate(ref_data.combined.node_ids):
+ if node_id is not None:
+ if node_id in node_id_to_row_index_map_combined:
+ node_id_to_row_index_map_combined[node_id].append(idx)
+ else:
+ node_id_to_row_index_map_combined[node_id] = [idx]
+
+ # Ensure we have found each node in the input mesh
+ assert sorted(node_id_to_row_index_map_combined.keys()) == sorted(
+ mesh.nodes.scoping.ids
+ )
+
+ data_per_node_and_material = {}
+
+ for node_id, combined_row_indices in node_id_to_row_index_map_combined.items():
+ multiplicity_of_node = len(combined_row_indices)
+ material_wise_data = {}
+ for mat_id, ref_data_item in ref_data.per_id.items():
+ if node_id in ref_data_item.node_ids:
+ row_index = ref_data_item.node_ids.index(node_id)
+ material_wise_data[mat_id] = ref_data_item.data[row_index]
+
+ if len(material_wise_data) != multiplicity_of_node:
+ raise RuntimeError(
+ f"Inconsistent combined and per material data for node id: {node_id}"
+ f" number of entries in combined data: {multiplicity_of_node}, "
+ f" number of entries in per material data: {len(material_wise_data)}"
+ )
+
+ for mat_id, data_per_material in material_wise_data.items():
+            # Check that each per-material value is close to one of the
+            # combined values for this node
+ assert np.isclose(
+ data_per_material,
+ np.array(ref_data.combined.data)[combined_row_indices],
+ ).any(), f"{node_id}, {mat_id}"
+
+ data_per_node_and_material[node_id] = material_wise_data
+
+ return data_per_node_and_material
+
+
+def get_ref_per_body_results_mechanical(
+ reference_csv_files: ReferenceCsvFiles, mesh: MeshedRegion
+):
+ return get_ref_result_per_node_and_material(mesh, reference_csv_files)
+
+
+def get_per_body_results_solid(
+ simulation: StaticMechanicalSimulation,
+ result_type: str,
+ mat_ids: list[int],
+ components: list[str],
+ additional_scoping: Optional[Scoping],
+):
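+    # Build reference per-body nodal results: rescope the elemental nodal result
+    # to each body (optionally intersected with the additional scoping) and
+    # average it to the nodes of that body.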
+    if additional_scoping is not None:
+ transpose_scoping = operators.scoping.transpose()
+ transpose_scoping.inputs.mesh_scoping(additional_scoping)
+ transpose_scoping.inputs.meshed_region(simulation.mesh._meshed_region)
+ transpose_scoping.inputs.inclusive(1)
+ transpose_scoping.inputs.requested_location(locations.elemental)
+
+ elemental_scoping = transpose_scoping.eval()
+
+ # Split the mesh by bodies to get an elemental scoping.
+ mesh = simulation.mesh._meshed_region
+ split_by_property_op = operators.scoping.split_on_property_type()
+ split_by_property_op.inputs.mesh(mesh)
+ split_by_property_op.inputs.label1("mat")
+
+ body_scopings = split_by_property_op.eval()
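+    # Raw (unaveraged) elemental nodal result used as input for the
+    # per-body averaging below.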
+ elemental_nodal_result = getattr(simulation, result_type)(components=components)._fc
+
+ all_values = {}
+
+ for mat_id in mat_ids:
+ body_scoping = body_scopings.get_scoping({"mat": mat_id})
+ assert body_scoping.location == locations.elemental
+
+ if additional_scoping is not None:
+ scoping_intersect_op = dpf.operators.scoping.intersect()
+ scoping_intersect_op.inputs.scopingA.connect(body_scoping)
+ scoping_intersect_op.inputs.scopingB.connect(elemental_scoping)
+
+ intersected_scoping = scoping_intersect_op.eval()
+ if len(intersected_scoping.ids) == 0:
+ continue
+ else:
+ intersected_scoping = body_scoping
+
+ # Rescope the elemental nodal results to the body
+ # and the optional additional scoping
+ rescope_op = operators.scoping.rescope_fc()
+ rescope_op.inputs.mesh_scoping(intersected_scoping)
+ rescope_op.inputs.fields_container(elemental_nodal_result)
+
+ to_nodal_op = dpf.operators.averaging.to_nodal_fc()
+ to_nodal_op.inputs.fields_container(rescope_op.outputs.fields_container)
+
+        # additional_scoping is optional; only rescope when a nodal scoping was given
+        if (
+            additional_scoping is not None
+            and additional_scoping.location == locations.nodal
+        ):
+ rescope_nodal_op = operators.scoping.rescope_fc()
+ rescope_nodal_op.inputs.fields_container(
+ to_nodal_op.outputs.fields_container()
+ )
+ rescope_nodal_op.inputs.mesh_scoping(additional_scoping)
+ nodal_fc = rescope_nodal_op.eval()
+ else:
+ nodal_fc = to_nodal_op.outputs.fields_container()
+ assert len(nodal_fc) == 1
+ nodal_field = nodal_fc[0]
+
+ values_per_mat = {}
+ for node_id in nodal_field.scoping.ids:
+ entity_data = nodal_field.get_entity_data_by_id(node_id)
+ assert len(entity_data) == 1
+ values_per_mat[node_id] = entity_data[0]
+
+ all_values[mat_id] = values_per_mat
+
+ # Get all node_ids so it is easy to build
+ # the dictionary with nested labels [node_id][mat_id]
+ all_node_ids = set()
+ for mat_id in mat_ids:
+ all_node_ids.update(all_values[mat_id].keys())
+
+ # Build nested dictionary with node_id and mat_id as nested keys.
+ expected_results = {}
+ for node_id in all_node_ids:
+ expected_results_per_node = {}
+ for mat_id in mat_ids:
+ if node_id in all_values[mat_id]:
+ expected_results_per_node[mat_id] = all_values[mat_id][node_id]
+ expected_results[node_id] = expected_results_per_node
+ return expected_results
+
+
+def get_ref_per_body_results_skin(
+ simulation: StaticMechanicalSimulation,
+ result_type: str,
+ mat_ids: list[int],
+ components: list[str],
+ skin_mesh: MeshedRegion,
+ additional_scoping: Optional[Scoping],
+):
+    if additional_scoping is not None:
+ transpose_scoping = operators.scoping.transpose()
+ transpose_scoping.inputs.mesh_scoping(additional_scoping)
+ transpose_scoping.inputs.meshed_region(simulation.mesh._meshed_region)
+ transpose_scoping.inputs.inclusive(1)
+ transpose_scoping.inputs.requested_location(locations.elemental)
+
+ elemental_scoping = transpose_scoping.eval()
+
+ # Get the reference skin results.
+ # Rescope the skin mesh to each body and extract the corresponding results.
+
+ # Split the mesh by bodies to get an elemental scoping.
+ mesh = simulation.mesh._meshed_region
+ split_by_property_op = operators.scoping.split_on_property_type()
+ split_by_property_op.inputs.mesh(mesh)
+ split_by_property_op.inputs.label1("mat")
+
+ body_scopings = split_by_property_op.eval()
+ elemental_nodal_result = getattr(simulation, result_type)(components=components)._fc
+
+ skin_values = {}
+
+ solid_mesh = elemental_nodal_result[0].meshed_region
+
+ for mat_id in mat_ids:
+ body_scoping = body_scopings.get_scoping({"mat": mat_id})
+ assert body_scoping.location == locations.elemental
+
+ if additional_scoping is not None:
+            scoping_intersect_op = dpf.operators.scoping.intersect()
+ scoping_intersect_op.inputs.scopingA.connect(body_scoping)
+ scoping_intersect_op.inputs.scopingB.connect(elemental_scoping)
+
+ intersected_scoping = scoping_intersect_op.eval()
+ if len(intersected_scoping.ids) == 0:
+ continue
+ else:
+ intersected_scoping = body_scoping
+
+ # Rescope the elemental nodal results to the body
+ # The elemental nodal results are used later to get the nodal
+ # results
+ rescope_op = operators.scoping.rescope_fc()
+ rescope_op.inputs.mesh_scoping(intersected_scoping)
+ rescope_op.inputs.fields_container(elemental_nodal_result)
+
+ # Rescope the solid mesh
+ rescope_mesh_op_solid = operators.mesh.from_scoping()
+ rescope_mesh_op_solid.inputs.mesh(solid_mesh)
+ rescope_mesh_op_solid.inputs.scoping(intersected_scoping)
+
+ rescoped_solid_mesh = rescope_mesh_op_solid.eval()
+
+ # Get the nodal scoping, which is needed to rescope
+ # the skin mesh.
+ transpose_scoping = operators.scoping.transpose()
+ transpose_scoping.inputs.mesh_scoping(intersected_scoping)
+ transpose_scoping.inputs.meshed_region(solid_mesh)
+ transpose_scoping.inputs.inclusive(1)
+
+ nodal_scoping = transpose_scoping.eval()
+
+ # Rescope the skin mesh
+ rescope_mesh_op_skin = operators.mesh.from_scoping()
+ rescope_mesh_op_skin.inputs.mesh(skin_mesh)
+ rescope_mesh_op_skin.inputs.scoping(nodal_scoping)
+ rescope_mesh_op_skin.inputs.inclusive(0)
+
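+        # Attach the rescoped solid mesh to the result fields before
+        # computing the expected nodal skin values.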
+ for field in elemental_nodal_result:
+ field.meshed_region = rescoped_solid_mesh
+
+ nodal_field = get_expected_nodal_skin_results(
+ simulation=simulation,
+ result_name=result_type,
+ mode=None,
+ skin_mesh=rescope_mesh_op_skin.eval(),
+ expand_cyclic=False,
+ elemental_nodal_results=rescope_op.eval(),
+ )
+
+ if additional_scoping and additional_scoping.location == locations.nodal:
+ rescope_to_add_scope = operators.scoping.rescope()
+ rescope_to_add_scope.inputs.mesh_scoping(additional_scoping)
+ rescope_to_add_scope.inputs.fields(nodal_field)
+ nodal_field = rescope_to_add_scope.outputs.fields_as_field()
+
+ skin_values_per_mat = {}
+ for node_id in nodal_field.scoping.ids:
+ entity_data = nodal_field.get_entity_data_by_id(node_id)
+ assert len(entity_data) == 1
+ skin_values_per_mat[node_id] = entity_data[0]
+
+ skin_values[mat_id] = skin_values_per_mat
+
+ # Get all node_ids so it is easy to build
+ # the dictionary with nested labels [node_id][mat_id]
+ all_node_ids = set()
+ for mat_id in mat_ids:
+ all_node_ids.update(skin_values[mat_id].keys())
+
+ # Build nested dictionary with node_id and mat_id as nested keys.
+ expected_results = {}
+ for node_id in all_node_ids:
+ expected_results_per_node = {}
+ for mat_id in mat_ids:
+ if node_id in skin_values[mat_id]:
+ expected_results_per_node[mat_id] = skin_values[mat_id][node_id]
+ expected_results[node_id] = expected_results_per_node
+ return expected_results
+
+
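+# Averaging configuration used by the tests below: bodies are defined by their
+# material id and MAPDL element type id, and results are averaged per body.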
+default_per_body_averaging_config = AveragingConfig(
+ body_defining_properties=[
+ elemental_properties.material,
+ "mapdl_element_type_id",
+ ],
+ average_per_body=True,
+)
+
+
+@pytest.mark.parametrize("is_skin", [False, True])
+# Note: Selections are only tested on the more complex model (average_per_body_complex_multi_body)
+@pytest.mark.parametrize(
+ "selection_name",
+ [
+ None,
+ # Use the named selection (nodal selection) in the model to do the selection.
+ "SELECTION",
+ # todo: add test with single node
+ # Use a custom selection (based on element ids) to do the selection.
+ "Custom",
+ # Use the named selection (nodal selection) in the model, but convert it to
+ # node_ids to test the node_ids argument of the results api.
+ "SELECTION_CONVERT_TO_NODAL",
+ # Use the named selection (nodal selection) in the model, but convert it to
+ # element_ids to test the element_ids argument of the results api.
+ "SELECTION_CONVERT_TO_ELEMENTAL",
+ ],
+)
+@pytest.mark.parametrize("result", ["stress", "elastic_strain"])
+@pytest.mark.parametrize(
+ "result_file_str, ref_files",
+ [
+ (r"average_per_body_two_cubes", "average_per_body_two_cubes_ref"),
+ (
+ r"average_per_body_complex_multi_body",
+ "average_per_body_complex_multi_body_ref",
+ ),
+ ],
+)
+def test_averaging_per_body_nodal(
+ request, is_skin, result, result_file_str, ref_files, selection_name
+):
+ if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ # average per body not supported before 9.0
+ return
+
+ ref_files = request.getfixturevalue(ref_files)
+
+ result_file = request.getfixturevalue(result_file_str)
+
+ is_custom_selection = selection_name in [
+ "Custom",
+ "SELECTION_CONVERT_TO_NODAL",
+ "SELECTION_CONVERT_TO_ELEMENTAL",
+ ]
+ simulation: StaticMechanicalSimulation = post.load_simulation(
+ data_sources=result_file,
+ simulation_type=AvailableSimulationTypes.static_mechanical,
+ )
+ mesh = simulation.mesh._meshed_region
+
+ expected_nodal_scope = None
+ if is_custom_selection:
+ if selection_name == "Custom":
+ # Element scope that corresponds to one body
+ element_scope = [25, 26, 32, 31, 27, 28, 33, 34, 29, 30, 35, 36]
+ custom_scoping = Scoping(ids=element_scope, location=locations.elemental)
+ transpose_op = operators.scoping.transpose()
+ transpose_op.inputs.requested_location(locations.nodal)
+ transpose_op.inputs.inclusive(0)
+ transpose_op.inputs.mesh_scoping(custom_scoping)
+ transpose_op.inputs.meshed_region(mesh)
+ expected_nodal_scope = transpose_op.eval().ids
+ else:
+ named_selection_scope = mesh.named_selection("SELECTION")
+ assert named_selection_scope.location == locations.nodal
+ expected_nodal_scope = named_selection_scope.ids
+ transpose_op = operators.scoping.transpose()
+ transpose_op.inputs.requested_location(locations.elemental)
+ transpose_op.inputs.inclusive(0)
+ transpose_op.inputs.mesh_scoping(named_selection_scope)
+ transpose_op.inputs.meshed_region(mesh)
+ custom_elemental_scoping = transpose_op.eval()
+
+ if selection_name == "SELECTION_CONVERT_TO_ELEMENTAL":
+ custom_scoping = Scoping(
+ ids=custom_elemental_scoping.ids, location=locations.elemental
+ )
+
+ if selection_name == "SELECTION_CONVERT_TO_NODAL":
+ custom_scoping = named_selection_scope
+
+ components = ["XX"]
+
+    kwargs = {}
+ if selection_name is not None:
+ if is_custom_selection:
+ if result_file_str != "average_per_body_complex_multi_body":
+ # Test custom selection only with complex case
+ return
+
+ if custom_scoping.location == locations.nodal:
+ kwargs["node_ids"] = custom_scoping.ids
+ else:
+ kwargs["element_ids"] = custom_scoping.ids
+ else:
+ kwargs["named_selections"] = [selection_name]
+ res = simulation._get_result(
+ base_name=operator_map[result],
+ location=locations.nodal,
+ category=ResultCategory.matrix,
+ skin=is_skin,
+ averaging_config=default_per_body_averaging_config,
+ components=components,
+ **kwargs,
+ )
+
+ named_selection = None
+ additional_scoping = None
+ if selection_name is None:
+ mat_field = mesh.property_field("mat")
+ bodies_in_selection = list(set(mat_field.data))
+
+ else:
+ if is_custom_selection:
+ additional_scoping = custom_scoping
+ else:
+ additional_scoping = mesh.named_selection(selection_name)
+            assert additional_scoping.location == locations.nodal
+ named_selection = additional_scoping
+
+ # Get only the bodies that are present in the named selection.
+ # Only these bodies are present in the dpf result.
+ bodies_in_selection = get_bodies_in_scoping(
+ meshed_region=simulation.mesh._meshed_region,
+ scoping=additional_scoping,
+ )
+
+ if is_skin:
+ # Compute reference data on skin (by rescoping results on skin)
+ ref_data = get_ref_per_body_results_skin(
+ simulation=simulation,
+ result_type=result,
+ mat_ids=bodies_in_selection,
+ components=components,
+ skin_mesh=res._fc[0].meshed_region,
+ additional_scoping=additional_scoping,
+ )
+ else:
+        # The Mechanical reference cannot be used here because the named selection
+        # splits a body, so the values at the boundaries of the named selection
+        # differ from the Mechanical results. Instead, the elemental nodal data is
+        # rescoped to the additional_scoping and then averaged on that scoping.
+ if named_selection is not None or is_custom_selection:
+ ref_data = get_per_body_results_solid(
+ simulation=simulation,
+ result_type=result,
+ mat_ids=bodies_in_selection,
+ components=components,
+ additional_scoping=additional_scoping,
+ )
+ else:
+ # get reference data from mechanical
+ ref_data = get_ref_per_body_results_mechanical(ref_files[result], mesh)
+
+ def get_expected_label_space_by_mat_id(mat_id: int):
+ # mapdl_element_type_id is not part of the label space before DPF 9.1
+ if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_1:
+ return {
+ elemental_properties.material: mat_id,
+ "time": 1,
+ }
+ return {
+ elemental_properties.material: mat_id,
+ "mapdl_element_type_id": mat_id,
+ "time": 1,
+ }
+
+ label_spaces_by_mat_id = {}
+ for idx in range(len(bodies_in_selection)):
+ label_space = res._fc.get_label_space(idx)
+ label_spaces_by_mat_id[label_space["mat"]] = label_space
+
+ assert len(label_spaces_by_mat_id) == len(bodies_in_selection)
+ for mat_id in bodies_in_selection:
+ assert label_spaces_by_mat_id[mat_id] == get_expected_label_space_by_mat_id(
+ mat_id
+ )
+
+ assert res._fc.get_label_space(len(bodies_in_selection)) == {}
+
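+    # Compare each per-body nodal value against the reference data.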
+ for node_id in ref_data:
+ for mat_id in ref_data[node_id]:
+ mat_id_int = int(mat_id)
+
+ if selection_name is not None and mat_id_int not in bodies_in_selection:
+ continue
+ field = res._fc.get_field({"mat": mat_id_int})
+
+ nodal_value = None
+ if expected_nodal_scope is not None:
+ assert set(field.scoping.ids).issubset(set(expected_nodal_scope)), set(
+ field.scoping.ids
+ ).difference(set(expected_nodal_scope))
+
+ if node_id in expected_nodal_scope:
+ nodal_value = field.get_entity_data_by_id(node_id)
+ else:
+ nodal_value = field.get_entity_data_by_id(node_id)
+
+ if nodal_value is not None:
+ assert np.isclose(
+ nodal_value[0], ref_data[node_id][mat_id], rtol=1e-3
+ ), f"{result}, {mat_id}, {node_id}"
+
+
+@pytest.mark.parametrize("is_skin", [False, True])
+@pytest.mark.parametrize("named_selection_name", [None, "SELECTION"])
+@pytest.mark.parametrize("result", ["stress", "elastic_strain"])
+@pytest.mark.parametrize(
+ "result_file",
+ [
+ r"average_per_body_two_cubes",
+ r"average_per_body_complex_multi_body",
+ ],
+)
+def test_averaging_per_body_elemental(
+ request, is_skin, result, result_file, named_selection_name
+):
+    # Elemental results are expected to be unaffected by the average-per-body flag.
+
+ if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ # average per body not supported before 9.0
+ return
+
+ result_file = request.getfixturevalue(result_file)
+ rst_file = pathlib.Path(result_file)
+ simulation: StaticMechanicalSimulation = post.load_simulation(
+ data_sources=rst_file,
+ simulation_type=AvailableSimulationTypes.static_mechanical,
+ )
+
+ components = ["XX"]
+
+ named_selections = None
+ if named_selection_name is not None:
+ named_selections = [named_selection_name]
+
+ kwargs = {
+ "base_name": operator_map[result],
+ "location": locations.elemental,
+ "category": ResultCategory.matrix,
+ "skin": is_skin,
+ "components": components,
+ "named_selections": named_selections,
+ }
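+    # Evaluate the same request with and without per-body averaging;
+    # the elemental results must match.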
+ res_per_body_fc = simulation._get_result(
+ **kwargs, averaging_config=default_per_body_averaging_config
+ )._fc
+
+ res_across_bodies_fc = simulation._get_result(
+ **kwargs, averaging_config=AveragingConfig()
+ )._fc
+
+ assert len(res_across_bodies_fc) == 1
+ res_across_bodies_field = res_across_bodies_fc[0]
+
+ mat_property_field = res_across_bodies_field.meshed_region.property_field("mat")
+ for element_id in res_across_bodies_field.scoping.ids:
+ mat_id_arr = mat_property_field.get_entity_data_by_id(element_id)
+ assert len(mat_id_arr) == 1
+
+ res_per_body_field = res_per_body_fc.get_field({"mat": mat_id_arr[0]})
+        # Use np.array_equal to compare the entity data arrays element-wise
+        assert np.array_equal(
+            res_across_bodies_field.get_entity_data_by_id(element_id),
+            res_per_body_field.get_entity_data_by_id(element_id),
+        )
+
+
+@pytest.mark.parametrize("is_skin", [False, True])
+@pytest.mark.parametrize("average_per_body", [False, True])
+@pytest.mark.parametrize("requested_location", ["Nodal", "Elemental"])
+def test_build_selection(
+ average_per_body_complex_multi_body, average_per_body, is_skin, requested_location
+):
+ if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_9_0:
+ # Logic has changed with server 9.0
+ return
+
+ rst_file = pathlib.Path(average_per_body_complex_multi_body)
+ simulation: StaticMechanicalSimulation = post.load_simulation(
+ data_sources=rst_file,
+ simulation_type=AvailableSimulationTypes.static_mechanical,
+ )
+
+ scoping = Scoping(
+ location=locations.elemental,
+ ids=[25, 26, 32, 31, 27, 28, 33, 34, 29, 30, 35, 36],
+ )
+
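+    # Build a selection from the element ids and check the scoping it produces.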
+ selection, rescoping = simulation._build_selection(
+ base_name="S",
+ category=ResultCategory.matrix,
+ location=requested_location,
+ skin=is_skin,
+ average_per_body=average_per_body,
+ selection=None,
+ set_ids=None,
+ times=None,
+ all_sets=True,
+ element_ids=scoping.ids,
+ )
+ selection_wf = selection.spatial_selection._selection
+ if selection.spatial_selection.requires_mesh:
+ selection_wf.connect(_WfNames.initial_mesh, simulation.mesh._meshed_region)
+ scoping_from_selection = selection_wf.get_output(_WfNames.scoping, Scoping)
+
+ if is_skin or average_per_body:
+ # If request is for skin or average per body, the location should be elemental
+ # because force_elemental_nodal is True
+ assert scoping_from_selection.location == locations.elemental
+ assert set(scoping_from_selection.ids) == set(scoping.ids)
+ else:
+ assert scoping_from_selection.location == requested_location
+ if requested_location == locations.nodal:
+ assert len(scoping_from_selection.ids) == 36
+ else:
+ assert set(scoping_from_selection.ids) == set(scoping.ids)